mirror of https://github.com/privatevoid-net/nix-super.git
synced 2024-11-26 15:56:18 +02:00

Merge remote-tracking branch 'nixos/master'

This commit is contained in: commit 80c164b001
147 changed files with 2283 additions and 948 deletions

.gitignore (vendored): 3 changes

@@ -51,6 +51,8 @@ perl/Makefile.config
 
 /src/nix/nix
 
+/src/nix/doc
+
 # /src/nix-env/
 /src/nix-env/nix-env
 
@@ -85,6 +87,7 @@ perl/Makefile.config
 /tests/shell.drv
 /tests/config.nix
 /tests/ca/config.nix
+/tests/dyn-drv/config.nix
 /tests/repl-result-out
 
 # /tests/lang/

@@ -30,6 +30,7 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy).
 You can use [labels](https://github.com/NixOS/nix/labels) to filter for relevant topics.
 
 2. Search for related issues that cover what you're going to work on. It could help to mention there that you will work on the issue.
+Pull requests addressing issues labeled ["idea approved"](https://github.com/NixOS/nix/labels/idea%20approved) are especially welcomed by maintainers and will receive prioritised review.
 
 3. Check the [Nix reference manual](https://nixos.org/manual/nix/unstable/contributing/hacking.html) for information on building Nix and running its tests.
 

@@ -11,6 +11,7 @@ man-pages := $(foreach n, \
 nix-prefetch-url.1 nix-channel.1 \
 nix-hash.1 nix-copy-closure.1 \
 nix.conf.5 nix-daemon.8 \
+nix-profiles.5 \
 , $(d)/$(n))
 
 # man pages for subcommands

@@ -85,6 +86,12 @@ $(d)/nix.conf.5: $(d)/src/command-ref/conf-file.md
 $(trace-gen) lowdown -sT man --nroff-nolinks -M section=5 $^.tmp -o $@
 @rm $^.tmp
 
+$(d)/nix-profiles.5: $(d)/src/command-ref/files/profiles.md
+	@printf "Title: %s\n\n" "$$(basename $@ .5)" > $^.tmp
+	@cat $^ >> $^.tmp
+	$(trace-gen) lowdown -sT man --nroff-nolinks -M section=5 $^.tmp -o $@
+	@rm $^.tmp
+
 $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md
 @cp $< $@
 @$(call process-includes,$@,$@)

@@ -92,6 +92,11 @@
 {{#include ./command-ref/new-cli/SUMMARY.md}}
 - [Files](command-ref/files.md)
 - [nix.conf](command-ref/conf-file.md)
+- [Profiles](command-ref/files/profiles.md)
+- [manifest.nix](command-ref/files/manifest.nix.md)
+- [manifest.json](command-ref/files/manifest.json.md)
+- [Channels](command-ref/files/channels.md)
+- [Default Nix expression](command-ref/files/default-nix-expression.md)
 - [Architecture](architecture/architecture.md)
 - [Glossary](glossary.md)
 - [Contributing](contributing/contributing.md)

@@ -48,13 +48,13 @@ If the build passes and is deterministic, Nix will exit with a status
 code of 0:
 
 ```console
-$ nix-build ./deterministic.nix -A stable
+$ nix-build ./deterministic.nix --attr stable
 this derivation will be built:
 /nix/store/z98fasz2jqy9gs0xbvdj939p27jwda38-stable.drv
 building '/nix/store/z98fasz2jqy9gs0xbvdj939p27jwda38-stable.drv'...
 /nix/store/yyxlzw3vqaas7wfp04g0b1xg51f2czgq-stable
 
-$ nix-build ./deterministic.nix -A stable --check
+$ nix-build ./deterministic.nix --attr stable --check
 checking outputs of '/nix/store/z98fasz2jqy9gs0xbvdj939p27jwda38-stable.drv'...
 /nix/store/yyxlzw3vqaas7wfp04g0b1xg51f2czgq-stable
 ```

@@ -63,13 +63,13 @@ If the build is not deterministic, Nix will exit with a status code of
 1:
 
 ```console
-$ nix-build ./deterministic.nix -A unstable
+$ nix-build ./deterministic.nix --attr unstable
 this derivation will be built:
 /nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv
 building '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv'...
 /nix/store/krpqk0l9ib0ibi1d2w52z293zw455cap-unstable
 
-$ nix-build ./deterministic.nix -A unstable --check
+$ nix-build ./deterministic.nix --attr unstable --check
 checking outputs of '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv'...
 error: derivation '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv' may
 not be deterministic: output '/nix/store/krpqk0l9ib0ibi1d2w52z293zw455cap-unstable' differs

@@ -89,7 +89,7 @@ Using `--check` with `--keep-failed` will cause Nix to keep the second
 build's output in a special, `.check` path:
 
 ```console
-$ nix-build ./deterministic.nix -A unstable --check --keep-failed
+$ nix-build ./deterministic.nix --attr unstable --check --keep-failed
 checking outputs of '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv'...
 note: keeping build directory '/tmp/nix-build-unstable.drv-0'
 error: derivation '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv' may

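The examples above build the attributes `stable` and `unstable` from a local file `./deterministic.nix`, which the manual defines elsewhere. A minimal, hypothetical sketch of such a file (assuming Nixpkgs' `runCommand`; not the manual's exact contents) could be:

```console
$ cat deterministic.nix
with import <nixpkgs> {};
{
  # always produces the same output, so --check succeeds
  stable = runCommand "stable" {} ''
    echo stable > $out
  '';
  # output changes on every build, so --check reports a difference
  unstable = runCommand "unstable" {} ''
    echo $RANDOM > $out
  '';
}
```
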
@@ -90,7 +90,7 @@ Then, restart the `nix-daemon`.
 Build any derivation, for example:
 
 ```console
-$ nix-build -E '(import <nixpkgs> {}).writeText "example" (builtins.toString builtins.currentTime)'
+$ nix-build --expr '(import <nixpkgs> {}).writeText "example" (builtins.toString builtins.currentTime)'
 this derivation will be built:
 /nix/store/s4pnfbkalzy5qz57qs6yybna8wylkig6-example.drv
 building '/nix/store/s4pnfbkalzy5qz57qs6yybna8wylkig6-example.drv'...

@@ -71,9 +71,12 @@ Most Nix commands interpret the following environment variables:
 Settings are separated by the newline character.
 
 - <span id="env-NIX_USER_CONF_FILES">[`NIX_USER_CONF_FILES`](#env-NIX_USER_CONF_FILES)</span>\
-Overrides the location of the user Nix configuration files to load
-from (defaults to the XDG spec locations). The variable is treated
-as a list separated by the `:` token.
+Overrides the location of the Nix user configuration files to load from.
+
+The default are the locations according to the [XDG Base Directory Specification].
+See the [XDG Base Directories](#xdg-base-directories) sub-section for details.
+
+The variable is treated as a list separated by the `:` token.
 
 - <span id="env-TMPDIR">[`TMPDIR`](#env-TMPDIR)</span>\
 Use the specified directory to store temporary files. In particular,

@@ -103,15 +106,19 @@ Most Nix commands interpret the following environment variables:
 384 MiB. Setting it to a low value reduces memory consumption, but
 will increase runtime due to the overhead of garbage collection.
 
-## XDG Base Directory
+## XDG Base Directories
 
-New Nix commands conform to the [XDG Base Directory Specification], and use the following environment variables to determine locations of various state and configuration files:
+Nix follows the [XDG Base Directory Specification].
+
+For backwards compatibility, Nix commands will follow the standard only when [`use-xdg-base-directories`] is enabled.
+[New Nix commands](@docroot@/command-ref/new-cli/nix.md) (experimental) conform to the standard by default.
+
+The following environment variables are used to determine locations of various state and configuration files:
 
 - [`XDG_CONFIG_HOME`]{#env-XDG_CONFIG_HOME} (default `~/.config`)
 - [`XDG_STATE_HOME`]{#env-XDG_STATE_HOME} (default `~/.local/state`)
 - [`XDG_CACHE_HOME`]{#env-XDG_CACHE_HOME} (default `~/.cache`)
 
-Classic Nix commands can also be made to follow this standard using the [`use-xdg-base-directories`] configuration option.
-
 [XDG Base Directory Specification]: https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
 [`use-xdg-base-directories`]: @docroot@/command-ref/conf-file.md#conf-use-xdg-base-directories

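For illustration, enabling this behaviour for the classic commands on a single-user install might look like the sketch below. The `use-xdg-base-directories` setting and the XDG default paths are the ones documented above; the package being installed is only an example.

```console
# write the setting into the user configuration file
$ mkdir -p ~/.config/nix
$ echo "use-xdg-base-directories = true" >> ~/.config/nix/nix.conf
$ nix-env --install --attr nixpkgs.hello
# profile generations now live under XDG_STATE_HOME instead of ~/.nix-profile's old target
$ ls ~/.local/state/nix/profiles
```
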
doc/manual/src/command-ref/files/channels.md (new file, 26 lines added):

## Channels

A directory containing symlinks to Nix channels, managed by [`nix-channel`]:

- `$XDG_STATE_HOME/nix/profiles/channels` for regular users
- `$NIX_STATE_DIR/profiles/per-user/root/channels` for `root`

[`nix-channel`] uses a [profile](@docroot@/command-ref/files/profiles.md) to store channels.
This profile contains symlinks to the contents of those channels.

## Subscribed channels

The list of subscribed channels is stored in

- `~/.nix-channels`
- `$XDG_STATE_HOME/nix/channels` if [`use-xdg-base-directories`] is set to `true`

in the following format:

```
<url> <name>
...
```

[`nix-channel`]: @docroot@/command-ref/nix-channel.md
[`use-xdg-base-directories`]: @docroot@/command-ref/conf-file.md#conf-use-xdg-base-directories

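For illustration (not part of the new file), a subscribed-channels file with one entry in the `<url> <name>` format described above might look like:

```console
$ cat ~/.nix-channels
https://nixos.org/channels/nixpkgs-unstable nixpkgs
```
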
doc/manual/src/command-ref/files/default-nix-expression.md (new file, 52 lines added):

## Default Nix expression

The source for the default [Nix expressions](@docroot@/language/index.md) used by [`nix-env`]:

- `~/.nix-defexpr`
- `$XDG_STATE_HOME/nix/defexpr` if [`use-xdg-base-directories`] is set to `true`.

It is loaded as follows:

- If the default expression is a file, it is loaded as a Nix expression.
- If the default expression is a directory containing a `default.nix` file, that `default.nix` file is loaded as a Nix expression.
- If the default expression is a directory without a `default.nix` file, then its contents (both files and subdirectories) are loaded as Nix expressions.
  The expressions are combined into a single attribute set, each expression under an attribute with the same name as the original file or subdirectory.
  Subdirectories without a `default.nix` file are traversed recursively in search of more Nix expressions, but the names of these intermediate directories are not added to the attribute paths of the default Nix expression.

Then, the resulting expression is interpreted like this:

- If the expression is an attribute set, it is used as the default Nix expression.
- If the expression is a function, an empty set is passed as argument and the return value is used as the default Nix expression.

For example, if the default expression contains two files, `foo.nix` and `bar.nix`, then the default Nix expression will be equivalent to

```nix
{
  foo = import ~/.nix-defexpr/foo.nix;
  bar = import ~/.nix-defexpr/bar.nix;
}
```

The file [`manifest.nix`](@docroot@/command-ref/files/manifest.nix.md) is always ignored.

The command [`nix-channel`] places a symlink to the user's current [channels profile](@docroot@/command-ref/files/channels.md) in this directory.
This makes all subscribed channels available as attributes in the default expression.

## User channel link

A symlink that ensures that [`nix-env`] can find your channels:

- `~/.nix-defexpr/channels`
- `$XDG_STATE_HOME/defexpr/channels` if [`use-xdg-base-directories`] is set to `true`.

This symlink points to:

- `$XDG_STATE_HOME/profiles/channels` for regular users
- `$NIX_STATE_DIR/profiles/per-user/root/channels` for `root`

In a multi-user installation, you may also have `~/.nix-defexpr/channels_root`, which links to the channels of the root user.

[`nix-env`]: @docroot@/command-ref/nix-env.md
[`nix-channel`]: @docroot@/command-ref/nix-channel.md
[`use-xdg-base-directories`]: @docroot@/command-ref/conf-file.md#conf-use-xdg-base-directories

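As a usage sketch (assuming the default expression location described above and an existing channel subscription), the attribute paths that the default Nix expression provides can be listed with `nix-env`:

```console
# list attribute paths provided by the default expression
$ nix-env --query --available --attr-path --file ~/.nix-defexpr
```
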
doc/manual/src/command-ref/files/manifest.json.md (new file, 45 lines added):

## `manifest.json`

The manifest file records the provenance of the packages that are installed in a [profile](./profiles.md) managed by [`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) (experimental).

Here is an example of what the file might look like after installing `zoom-us` from Nixpkgs:

```json
{
  "version": 1,
  "elements": [
    {
      "active": true,
      "attrPath": "legacyPackages.x86_64-linux.zoom-us",
      "originalUrl": "flake:nixpkgs",
      "storePaths": [
        "/nix/store/wbhg2ga8f3h87s9h5k0slxk0m81m4cxl-zoom-us-5.3.469451.0927"
      ],
      "uri": "github:NixOS/nixpkgs/13d0c311e3ae923a00f734b43fd1d35b47d8943a"
    },
    …
  ]
}
```

Each object in the array `elements` denotes an installed package and
has the following fields:

* `originalUrl`: The [flake reference](@docroot@/command-ref/new-cli/nix3-flake.md) specified by
  the user at the time of installation (e.g. `nixpkgs`). This is also
  the flake reference that will be used by `nix profile upgrade`.

* `uri`: The locked flake reference to which `originalUrl` resolved.

* `attrPath`: The flake output attribute that provided this
  package. Note that this is not necessarily the attribute that the
  user specified, but the one resulting from applying the default
  attribute paths and prefixes; for instance, `hello` might resolve to
  `packages.x86_64-linux.hello` and the empty string to
  `packages.x86_64-linux.default`.

* `storePaths`: The paths in the Nix store containing the package.

* `active`: Whether the profile contains symlinks to the files of this
  package. If set to false, the package is kept in the Nix store, but
  is not "visible" in the profile's symlink tree.

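As a usage sketch, the documented structure can be queried straight from a profile's manifest, assuming a profile managed by `nix profile` and `jq` being available:

```console
# print the store paths of all packages that are active in the profile
$ jq -r '.elements[] | select(.active) | .storePaths[]' ~/.nix-profile/manifest.json
```
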
doc/manual/src/command-ref/files/manifest.nix.md (new file, 128 lines added):

## `manifest.nix`

The manifest file records the provenance of the packages that are installed in a [profile](./profiles.md) managed by [`nix-env`](@docroot@/command-ref/nix-env.md).

Here is an example of what this file might look like after installing `hello` from Nixpkgs:

```nix
[{
  meta = {
    available = true;
    broken = false;
    changelog =
      "https://git.savannah.gnu.org/cgit/hello.git/plain/NEWS?h=v2.12.1";
    description = "A program that produces a familiar, friendly greeting";
    homepage = "https://www.gnu.org/software/hello/manual/";
    insecure = false;
    license = {
      deprecated = false;
      free = true;
      fullName = "GNU General Public License v3.0 or later";
      redistributable = true;
      shortName = "gpl3Plus";
      spdxId = "GPL-3.0-or-later";
      url = "https://spdx.org/licenses/GPL-3.0-or-later.html";
    };
    longDescription = ''
      GNU Hello is a program that prints "Hello, world!" when you run it.
      It is fully customizable.
    '';
    maintainers = [{
      email = "edolstra+nixpkgs@gmail.com";
      github = "edolstra";
      githubId = 1148549;
      name = "Eelco Dolstra";
    }];
    name = "hello-2.12.1";
    outputsToInstall = [ "out" ];
    platforms = [
      "i686-cygwin"
      "x86_64-cygwin"
      "x86_64-darwin"
      "i686-darwin"
      "aarch64-darwin"
      "armv7a-darwin"
      "i686-freebsd13"
      "x86_64-freebsd13"
      "aarch64-genode"
      "i686-genode"
      "x86_64-genode"
      "x86_64-solaris"
      "js-ghcjs"
      "aarch64-linux"
      "armv5tel-linux"
      "armv6l-linux"
      "armv7a-linux"
      "armv7l-linux"
      "i686-linux"
      "m68k-linux"
      "microblaze-linux"
      "microblazeel-linux"
      "mipsel-linux"
      "mips64el-linux"
      "powerpc64-linux"
      "powerpc64le-linux"
      "riscv32-linux"
      "riscv64-linux"
      "s390-linux"
      "s390x-linux"
      "x86_64-linux"
      "mmix-mmixware"
      "aarch64-netbsd"
      "armv6l-netbsd"
      "armv7a-netbsd"
      "armv7l-netbsd"
      "i686-netbsd"
      "m68k-netbsd"
      "mipsel-netbsd"
      "powerpc-netbsd"
      "riscv32-netbsd"
      "riscv64-netbsd"
      "x86_64-netbsd"
      "aarch64_be-none"
      "aarch64-none"
      "arm-none"
      "armv6l-none"
      "avr-none"
      "i686-none"
      "microblaze-none"
      "microblazeel-none"
      "msp430-none"
      "or1k-none"
      "m68k-none"
      "powerpc-none"
      "powerpcle-none"
      "riscv32-none"
      "riscv64-none"
      "rx-none"
      "s390-none"
      "s390x-none"
      "vc4-none"
      "x86_64-none"
      "i686-openbsd"
      "x86_64-openbsd"
      "x86_64-redox"
      "wasm64-wasi"
      "wasm32-wasi"
      "x86_64-windows"
      "i686-windows"
    ];
    position =
      "/nix/store/7niq32w715567hbph0q13m5lqna64c1s-nixos-unstable.tar.gz/nixos-unstable.tar.gz/pkgs/applications/misc/hello/default.nix:34";
    unfree = false;
    unsupported = false;
  };
  name = "hello-2.12.1";
  out = {
    outPath = "/nix/store/260q5867crm1xjs4khgqpl6vr9kywql1-hello-2.12.1";
  };
  outPath = "/nix/store/260q5867crm1xjs4khgqpl6vr9kywql1-hello-2.12.1";
  outputs = [ "out" ];
  system = "x86_64-linux";
  type = "derivation";
}]
```

Each element in this list corresponds to an installed package.
It incorporates some attributes of the original derivation, including `meta`, `name`, `out`, `outPath`, `outputs`, `system`.
This information is used by Nix for querying and updating the package.

doc/manual/src/command-ref/files/profiles.md (new file, 74 lines added):

## Profiles

A directory that contains links to profiles managed by [`nix-env`] and [`nix profile`]:

- `$XDG_STATE_HOME/nix/profiles` for regular users
- `$NIX_STATE_DIR/profiles/per-user/root` if the user is `root`

A profile is a directory of symlinks to files in the Nix store.

### Filesystem layout

Profiles are versioned as follows. When using a profile named *path*, *path* is a symlink to *path*`-`*N*`-link`, where *N* is the version of the profile.
In turn, *path*`-`*N*`-link` is a symlink to a path in the Nix store.
For example:

```console
$ ls -l ~alice/.local/state/nix/profiles/profile*
lrwxrwxrwx 1 alice users 14 Nov 25 14:35 /home/alice/.local/state/nix/profiles/profile -> profile-7-link
lrwxrwxrwx 1 alice users 51 Oct 28 16:18 /home/alice/.local/state/nix/profiles/profile-5-link -> /nix/store/q69xad13ghpf7ir87h0b2gd28lafjj1j-profile
lrwxrwxrwx 1 alice users 51 Oct 29 13:20 /home/alice/.local/state/nix/profiles/profile-6-link -> /nix/store/6bvhpysd7vwz7k3b0pndn7ifi5xr32dg-profile
lrwxrwxrwx 1 alice users 51 Nov 25 14:35 /home/alice/.local/state/nix/profiles/profile-7-link -> /nix/store/mp0x6xnsg0b8qhswy6riqvimai4gm677-profile
```

Each of these symlinks is a root for the Nix garbage collector.

The contents of the store path corresponding to each version of the
profile is a tree of symlinks to the files of the installed packages,
e.g.

```console
$ ll -R ~eelco/.local/state/nix/profiles/profile-7-link/
/home/eelco/.local/state/nix/profiles/profile-7-link/:
total 20
dr-xr-xr-x 2 root root 4096 Jan 1 1970 bin
-r--r--r-- 2 root root 1402 Jan 1 1970 manifest.nix
dr-xr-xr-x 4 root root 4096 Jan 1 1970 share

/home/eelco/.local/state/nix/profiles/profile-7-link/bin:
total 20
lrwxrwxrwx 5 root root 79 Jan 1 1970 chromium -> /nix/store/ijm5k0zqisvkdwjkc77mb9qzb35xfi4m-chromium-86.0.4240.111/bin/chromium
lrwxrwxrwx 7 root root 87 Jan 1 1970 spotify -> /nix/store/w9182874m1bl56smps3m5zjj36jhp3rn-spotify-1.1.26.501.gbe11e53b-15/bin/spotify
lrwxrwxrwx 3 root root 79 Jan 1 1970 zoom-us -> /nix/store/wbhg2ga8f3h87s9h5k0slxk0m81m4cxl-zoom-us-5.3.469451.0927/bin/zoom-us

/home/eelco/.local/state/nix/profiles/profile-7-link/share/applications:
total 12
lrwxrwxrwx 4 root root 120 Jan 1 1970 chromium-browser.desktop -> /nix/store/4cf803y4vzfm3gyk3vzhzb2327v0kl8a-chromium-unwrapped-86.0.4240.111/share/applications/chromium-browser.desktop
lrwxrwxrwx 7 root root 110 Jan 1 1970 spotify.desktop -> /nix/store/w9182874m1bl56smps3m5zjj36jhp3rn-spotify-1.1.26.501.gbe11e53b-15/share/applications/spotify.desktop
lrwxrwxrwx 3 root root 107 Jan 1 1970 us.zoom.Zoom.desktop -> /nix/store/wbhg2ga8f3h87s9h5k0slxk0m81m4cxl-zoom-us-5.3.469451.0927/share/applications/us.zoom.Zoom.desktop

…
```

Each profile version contains a manifest file:
- [`manifest.nix`](@docroot@/command-ref/files/manifest.nix.md) used by [`nix-env`](@docroot@/command-ref/nix-env.md).
- [`manifest.json`](@docroot@/command-ref/files/manifest.json.md) used by [`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) (experimental).

## User profile link

A symbolic link to the user's current profile:

- `~/.nix-profile`
- `$XDG_STATE_HOME/nix/profile` if [`use-xdg-base-directories`] is set to `true`.

By default, this symlink points to:

- `$XDG_STATE_HOME/nix/profiles/profile` for regular users
- `$NIX_STATE_DIR/profiles/per-user/root/profile` for `root`

The `PATH` environment variable should include the `/bin` subdirectory of the profile link (e.g. `~/.nix-profile/bin`) for the user environment to be visible to the user.
The [installer](@docroot@/installation/installing-binary.md) sets this up by default, unless you enable [`use-xdg-base-directories`].

[`nix-env`]: @docroot@/command-ref/nix-env.md
[`nix profile`]: @docroot@/command-ref/new-cli/nix3-profile.md
[`use-xdg-base-directories`]: @docroot@/command-ref/conf-file.md#conf-use-xdg-base-directories

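As a usage note, the numbered generation links shown above are managed with `nix-env`; a minimal sketch, reusing the profile path from the example (flags are documented in `nix-env`, the output is omitted):

```console
# list the versions (generations) of a profile
$ nix-env --profile ~alice/.local/state/nix/profiles/profile --list-generations
# roll back to the previous generation
$ nix-env --profile ~alice/.local/state/nix/profiles/profile --rollback
```
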
@@ -76,7 +76,7 @@ except for `--arg` and `--attr` / `-A` which are passed to `nix-instantiate`.
 # Examples
 
 ```console
-$ nix-build '<nixpkgs>' -A firefox
+$ nix-build '<nixpkgs>' --attr firefox
 store derivation is /nix/store/qybprl8sz2lc...-firefox-1.5.0.7.drv
 /nix/store/d18hyl92g30l...-firefox-1.5.0.7
 
@@ -91,7 +91,7 @@ If a derivation has multiple outputs, `nix-build` will build the default
 (first) output. You can also build all outputs:
 
 ```console
-$ nix-build '<nixpkgs>' -A openssl.all
+$ nix-build '<nixpkgs>' --attr openssl.all
 ```
 
 This will create a symlink for each output named `result-outputname`.

@@ -101,7 +101,7 @@ outputs `out`, `bin` and `man`, `nix-build` will create symlinks
 specific output:
 
 ```console
-$ nix-build '<nixpkgs>' -A openssl.man
+$ nix-build '<nixpkgs>' --attr openssl.man
 ```
 
 This will create a symlink `result-man`.

@@ -109,7 +109,7 @@ This will create a symlink `result-man`.
 Build a Nix expression given on the command line:
 
 ```console
-$ nix-build -E 'with import <nixpkgs> { }; runCommand "foo" { } "echo bar > $out"'
+$ nix-build --expr 'with import <nixpkgs> { }; runCommand "foo" { } "echo bar > $out"'
 $ cat ./result
 bar
 ```

@@ -118,5 +118,5 @@ Build the GNU Hello package from the latest revision of the master
 branch of Nixpkgs:
 
 ```console
-$ nix-build https://github.com/NixOS/nixpkgs/archive/master.tar.gz -A hello
+$ nix-build https://github.com/NixOS/nixpkgs/archive/master.tar.gz --attr hello
 ```

@@ -22,6 +22,9 @@ This command has the following operations:
 channels. If *name* is omitted, it defaults to the last component of
 *url*, with the suffixes `-stable` or `-unstable` removed.
 
+A channel URL must point to a directory containing a file `nixexprs.tar.gz`.
+At the top level, that tarball must contain a single directory with a `default.nix` file that serves as the channel’s entry point.
+
 - `--remove` *name*\
 Removes the channel named *name* from the list of subscribed
 channels.

@@ -49,6 +52,12 @@ The list of subscribed channels is stored in `~/.nix-channels`.
 
 {{#include ./env-common.md}}
 
+# Files
+
+`nix-channel` operates on the following files.
+
+{{#include ./files/channels.md}}
+
 # Examples
 
 To subscribe to the Nixpkgs channel and install the GNU Hello package:

@@ -56,45 +65,18 @@ To subscribe to the Nixpkgs channel and install the GNU Hello package:
 ```console
 $ nix-channel --add https://nixos.org/channels/nixpkgs-unstable
 $ nix-channel --update
-$ nix-env -iA nixpkgs.hello
+$ nix-env --install --attr nixpkgs.hello
 ```
 
 You can revert channel updates using `--rollback`:
 
 ```console
-$ nix-instantiate --eval -E '(import <nixpkgs> {}).lib.version'
+$ nix-instantiate --eval --expr '(import <nixpkgs> {}).lib.version'
 "14.04.527.0e935f1"
 
 $ nix-channel --rollback
 switching from generation 483 to 482
 
-$ nix-instantiate --eval -E '(import <nixpkgs> {}).lib.version'
+$ nix-instantiate --eval --expr '(import <nixpkgs> {}).lib.version'
 "14.04.526.dbadfad"
 ```
 
-# Files
-
-- `${XDG_STATE_HOME-$HOME/.local/state}/nix/profiles/channels`\
-`nix-channel` uses a `nix-env` profile to keep track of previous
-versions of the subscribed channels. Every time you run `nix-channel
---update`, a new channel generation (that is, a symlink to the
-channel Nix expressions in the Nix store) is created. This enables
-`nix-channel --rollback` to revert to previous versions.
-
-- `~/.nix-defexpr/channels`\
-This is a symlink to
-`${XDG_STATE_HOME-$HOME/.local/state}/nix/profiles/channels`. It ensures that
-`nix-env` can find your channels. In a multi-user installation, you
-may also have `~/.nix-defexpr/channels_root`, which links to the
-channels of the root user.
-
-# Channel format
-
-A channel URL should point to a directory containing the following
-files:
-
-- `nixexprs.tar.xz`\
-A tarball containing Nix expressions and files referenced by them
-(such as build scripts and patches). At the top level, the tarball
-should contain a single directory. That directory must contain a
-file `default.nix` that serves as the channel’s “entry point”.

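For illustration, a self-hosted channel matching the layout described in the added text might be assembled like this (hypothetical directory names and URL; the tarball contains a single top-level directory with a `default.nix` entry point):

```console
# build the channel tarball next to the files that will be served
$ mkdir -p my-channel my-exprs
$ cp default.nix my-exprs/                     # the channel's entry point
$ tar -czf my-channel/nixexprs.tar.gz my-exprs
# serve the my-channel/ directory over HTTP, then subscribe to it
$ nix-channel --add https://example.org/my-channel my-channel
```
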
@@ -87,5 +87,5 @@ environment:
 ```console
 $ nix-copy-closure --from alice@itchy.labs \
 /nix/store/0dj0503hjxy5mbwlafv1rsbdiyx1gkdy-subversion-1.4.4
-$ nix-env -i /nix/store/0dj0503hjxy5mbwlafv1rsbdiyx1gkdy-subversion-1.4.4
+$ nix-env --install /nix/store/0dj0503hjxy5mbwlafv1rsbdiyx1gkdy-subversion-1.4.4
 ```

@@ -49,7 +49,7 @@ These pages can be viewed offline:
 
 # Selectors
 
-Several commands, such as `nix-env -q` and `nix-env -i`, take a list of
+Several commands, such as `nix-env --query` and `nix-env --install`, take a list of
 arguments that specify the packages on which to operate. These are
 extended regular expressions that must match the entire name of the
 package. (For details on regular expressions, see **regex**(7).) The match is

@@ -83,46 +83,8 @@ match. Here are some examples:
 
 # Files
 
-- `~/.nix-defexpr`\
-The source for the default Nix expressions used by the
-`--install`, `--upgrade`, and `--query --available` operations to
-obtain derivations. The `--file` option may be used to override
-this default.
-
-If `~/.nix-defexpr` is a file, it is loaded as a Nix expression. If
-the expression is a set, it is used as the default Nix expression.
-If the expression is a function, an empty set is passed as argument
-and the return value is used as the default Nix expression.
-
-If `~/.nix-defexpr` is a directory containing a `default.nix` file,
-that file is loaded as in the above paragraph.
-
-If `~/.nix-defexpr` is a directory without a `default.nix` file,
-then its contents (both files and subdirectories) are loaded as Nix
-expressions. The expressions are combined into a single set, each
-expression under an attribute with the same name as the original
-file or subdirectory.
-
-For example, if `~/.nix-defexpr` contains two files, `foo.nix` and
-`bar.nix`, then the default Nix expression will essentially be
-
-```nix
-{
-foo = import ~/.nix-defexpr/foo.nix;
-bar = import ~/.nix-defexpr/bar.nix;
-}
-```
-
-The file `manifest.nix` is always ignored. Subdirectories without a
-`default.nix` file are traversed recursively in search of more Nix
-expressions, but the names of these intermediate directories are not
-added to the attribute paths of the default Nix expression.
-
-The command `nix-channel` places symlinks to the downloaded Nix
-expressions from each subscribed channel in this directory.
-
-- `~/.nix-profile`\
-A symbolic link to the user's current profile. By default, this
-symlink points to `prefix/var/nix/profiles/default`. The `PATH`
-environment variable should include `~/.nix-profile/bin` for the
-user environment to be visible to the user.
+`nix-env` operates on the following files.
+
+{{#include ./files/default-nix-expression.md}}
+
+{{#include ./files/profiles.md}}

@@ -41,6 +41,6 @@ $ nix-env --delete-generations 30d
 ```
 
 ```console
-$ nix-env -p other_profile --delete-generations old
+$ nix-env --profile other_profile --delete-generations old
 ```
 

@@ -36,7 +36,7 @@ a number of possible ways:
 then the derivation with the highest version will be installed.
 
 You can force the installation of multiple derivations with the same
-name by being specific about the versions. For instance, `nix-env -i
+name by being specific about the versions. For instance, `nix-env --install
 gcc-3.3.6 gcc-4.1.1` will install both version of GCC (and will
 probably cause a user environment conflict\!).
 
@@ -44,7 +44,7 @@ a number of possible ways:
 paths* that select attributes from the top-level Nix
 expression. This is faster than using derivation names and
 unambiguous. To find out the attribute paths of available
-packages, use `nix-env -qaP`.
+packages, use `nix-env --query --available --attr-path`.
 
 - If `--from-profile` *path* is given, *args* is a set of names
 denoting installed store paths in the profile *path*. This is an

@@ -87,7 +87,7 @@ a number of possible ways:
 
 - `--remove-all` / `-r`\
 Remove all previously installed packages first. This is equivalent
-to running `nix-env -e '.*'` first, except that everything happens
+to running `nix-env --uninstall '.*'` first, except that everything happens
 in a single transaction.
 
 {{#include ./opt-common.md}}

@@ -103,9 +103,9 @@ a number of possible ways:
 To install a package using a specific attribute path from the active Nix expression:
 
 ```console
-$ nix-env -iA gcc40mips
+$ nix-env --install --attr gcc40mips
 installing `gcc-4.0.2'
-$ nix-env -iA xorg.xorgserver
+$ nix-env --install --attr xorg.xorgserver
 installing `xorg-server-1.2.0'
 ```
 
@@ -133,32 +133,32 @@ installing `gcc-3.3.2'
 To install all derivations in the Nix expression `foo.nix`:
 
 ```console
-$ nix-env -f ~/foo.nix -i '.*'
+$ nix-env --file ~/foo.nix --install '.*'
 ```
 
 To copy the store path with symbolic name `gcc` from another profile:
 
 ```console
-$ nix-env -i --from-profile /nix/var/nix/profiles/foo gcc
+$ nix-env --install --from-profile /nix/var/nix/profiles/foo gcc
 ```
 
 To install a specific [store derivation] (typically created by
 `nix-instantiate`):
 
 ```console
-$ nix-env -i /nix/store/fibjb1bfbpm5mrsxc4mh2d8n37sxh91i-gcc-3.4.3.drv
+$ nix-env --install /nix/store/fibjb1bfbpm5mrsxc4mh2d8n37sxh91i-gcc-3.4.3.drv
 ```
 
 To install a specific output path:
 
 ```console
-$ nix-env -i /nix/store/y3cgx0xj1p4iv9x0pnnmdhr8iyg741vk-gcc-3.4.3
+$ nix-env --install /nix/store/y3cgx0xj1p4iv9x0pnnmdhr8iyg741vk-gcc-3.4.3
 ```
 
 To install from a Nix expression specified on the command-line:
 
 ```console
-$ nix-env -f ./foo.nix -i -E \
+$ nix-env --file ./foo.nix --install --expr \
 'f: (f {system = "i686-linux";}).subversionWithJava'
 ```
 
@@ -170,7 +170,7 @@ function defined in `./foo.nix`.
 A dry-run tells you which paths will be downloaded or built from source:
 
 ```console
-$ nix-env -f '<nixpkgs>' -iA hello --dry-run
+$ nix-env --file '<nixpkgs>' --install --attr hello --dry-run
 (dry run; not doing anything)
 installing ‘hello-2.10’
 this path will be fetched (0.04 MiB download, 0.19 MiB unpacked):

@@ -182,6 +182,6 @@ To install Firefox from the latest revision in the Nixpkgs/NixOS 14.12
 channel:
 
 ```console
-$ nix-env -f https://github.com/NixOS/nixpkgs/archive/nixos-14.12.tar.gz -iA firefox
+$ nix-env --file https://github.com/NixOS/nixpkgs/archive/nixos-14.12.tar.gz --install --attr firefox
 ```
 

@@ -137,7 +137,7 @@ derivation is shown unless `--no-name` is specified.
 To show installed packages:
 
 ```console
-$ nix-env -q
+$ nix-env --query
 bison-1.875c
 docbook-xml-4.2
 firefox-1.0.4

@@ -149,7 +149,7 @@ ORBit2-2.8.3
 To show available packages:
 
 ```console
-$ nix-env -qa
+$ nix-env --query --available
 firefox-1.0.7
 GConf-2.4.0.1
 MPlayer-1.0pre7

@@ -160,7 +160,7 @@ ORBit2-2.8.3
 To show the status of available packages:
 
 ```console
-$ nix-env -qas
+$ nix-env --query --available --status
 -P- firefox-1.0.7 (not installed but present)
 --S GConf-2.4.0.1 (not present, but there is a substitute for fast installation)
 --S MPlayer-1.0pre3 (i.e., this is not the installed MPlayer, even though the version is the same!)

@@ -171,14 +171,14 @@ IP- ORBit2-2.8.3 (installed and by definition present)
 To show available packages in the Nix expression `foo.nix`:
 
 ```console
-$ nix-env -f ./foo.nix -qa
+$ nix-env --file ./foo.nix --query --available
 foo-1.2.3
 ```
 
 To compare installed versions to what’s available:
 
 ```console
-$ nix-env -qc
+$ nix-env --query --compare-versions
 ...
 acrobat-reader-7.0 - ? (package is not available at all)
 autoconf-2.59 = 2.59 (same version)

@@ -189,7 +189,7 @@ firefox-1.0.4 < 1.0.7 (a more recent version is available)
 To show all packages with “`zip`” in the name:
 
 ```console
-$ nix-env -qa '.*zip.*'
+$ nix-env --query --available '.*zip.*'
 bzip2-1.0.6
 gzip-1.6
 zip-3.0

@@ -199,7 +199,7 @@ zip-3.0
 To show all packages with “`firefox`” or “`chromium`” in the name:
 
 ```console
-$ nix-env -qa '.*(firefox|chromium).*'
+$ nix-env --query --available '.*(firefox|chromium).*'
 chromium-37.0.2062.94
 chromium-beta-38.0.2125.24
 firefox-32.0.3

@@ -210,6 +210,6 @@ firefox-with-plugins-13.0.1
 To show all packages in the latest revision of the Nixpkgs repository:
 
 ```console
-$ nix-env -f https://github.com/NixOS/nixpkgs/archive/master.tar.gz -qa
+$ nix-env --file https://github.com/NixOS/nixpkgs/archive/master.tar.gz --query --available
 ```
 

@@ -46,16 +46,16 @@ To prevent the currently installed Firefox from being upgraded:
 $ nix-env --set-flag keep true firefox
 ```
 
-After this, `nix-env -u` will ignore Firefox.
+After this, `nix-env --upgrade` will ignore Firefox.
 
 To disable the currently installed Firefox, then install a new Firefox
 while the old remains part of the profile:
 
 ```console
-$ nix-env -q
+$ nix-env --query
 firefox-2.0.0.9 (the current one)
 
-$ nix-env --preserve-installed -i firefox-2.0.0.11
+$ nix-env --preserve-installed --install firefox-2.0.0.11
 installing `firefox-2.0.0.11'
 building path(s) `/nix/store/myy0y59q3ig70dgq37jqwg1j0rsapzsl-user-environment'
 collision between `/nix/store/...-firefox-2.0.0.11/bin/firefox'

@@ -65,10 +65,10 @@ collision between `/nix/store/...-firefox-2.0.0.11/bin/firefox'
 $ nix-env --set-flag active false firefox
 setting flag on `firefox-2.0.0.9'
 
-$ nix-env --preserve-installed -i firefox-2.0.0.11
+$ nix-env --preserve-installed --install firefox-2.0.0.11
 installing `firefox-2.0.0.11'
 
-$ nix-env -q
+$ nix-env --query
 firefox-2.0.0.11 (the enabled one)
 firefox-2.0.0.9 (the disabled one)
 ```

|
@ -25,6 +25,6 @@ The following updates a profile such that its current generation will
|
||||||
contain just Firefox:
|
contain just Firefox:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -p /nix/var/nix/profiles/browser --set firefox
|
$ nix-env --profile /nix/var/nix/profiles/browser --set firefox
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
@@ -27,7 +27,7 @@ Switching will fail if the specified generation does not exist.
 # Examples
 
 ```console
-$ nix-env -G 42
+$ nix-env --switch-generation 42
 switching from generation 50 to 42
 ```
 

@@ -22,5 +22,5 @@ the symlink `~/.nix-profile` is made to point to *path*.
 # Examples
 
 ```console
-$ nix-env -S ~/my-profile
+$ nix-env --switch-profile ~/my-profile
 ```

@@ -24,5 +24,5 @@ designated by the symbolic names *drvnames* are removed.
 
 ```console
 $ nix-env --uninstall gcc
-$ nix-env -e '.*' (remove everything)
+$ nix-env --uninstall '.*' (remove everything)
 ```

@@ -76,21 +76,21 @@ version is installed.
 # Examples
 
 ```console
-$ nix-env --upgrade -A nixpkgs.gcc
+$ nix-env --upgrade --attr nixpkgs.gcc
 upgrading `gcc-3.3.1' to `gcc-3.4'
 ```
 
 When there are no updates available, nothing will happen:
 
 ```console
-$ nix-env --upgrade -A nixpkgs.pan
+$ nix-env --upgrade --attr nixpkgs.pan
 ```
 
 Using `-A` is preferred when possible, as it is faster and unambiguous but
 it is also possible to upgrade to a specific version by matching the derivation name:
 
 ```console
-$ nix-env -u gcc-3.3.2 --always
+$ nix-env --upgrade gcc-3.3.2 --always
 upgrading `gcc-3.4' to `gcc-3.3.2'
 ```
 
@@ -98,7 +98,7 @@ To try to upgrade everything
 (matching packages based on the part of the derivation name without version):
 
 ```console
-$ nix-env -u
+$ nix-env --upgrade
 upgrading `hello-2.1.2' to `hello-2.1.3'
 upgrading `mozilla-1.2' to `mozilla-1.4'
 ```

@@ -88,7 +88,7 @@ Instantiate [store derivation]s from a Nix expression, and build them using `nix-store`:
 $ nix-instantiate test.nix (instantiate)
 /nix/store/cigxbmvy6dzix98dxxh9b6shg7ar5bvs-perl-BerkeleyDB-0.26.drv
 
-$ nix-store -r $(nix-instantiate test.nix) (build)
+$ nix-store --realise $(nix-instantiate test.nix) (build)
 ...
 /nix/store/qhqk4n8ci095g3sdp93x7rgwyh9rdvgk-perl-BerkeleyDB-0.26 (output path)
 
@@ -100,30 +100,30 @@ dr-xr-xr-x 2 eelco users 4096 1970-01-01 01:00 lib
 You can also give a Nix expression on the command line:
 
 ```console
-$ nix-instantiate -E 'with import <nixpkgs> { }; hello'
+$ nix-instantiate --expr 'with import <nixpkgs> { }; hello'
 /nix/store/j8s4zyv75a724q38cb0r87rlczaiag4y-hello-2.8.drv
 ```
 
 This is equivalent to:
 
 ```console
-$ nix-instantiate '<nixpkgs>' -A hello
+$ nix-instantiate '<nixpkgs>' --attr hello
 ```
 
 Parsing and evaluating Nix expressions:
 
 ```console
-$ nix-instantiate --parse -E '1 + 2'
+$ nix-instantiate --parse --expr '1 + 2'
 1 + 2
 ```
 
 ```console
-$ nix-instantiate --eval -E '1 + 2'
+$ nix-instantiate --eval --expr '1 + 2'
 3
 ```
 
 ```console
-$ nix-instantiate --eval --xml -E '1 + 2'
+$ nix-instantiate --eval --xml --expr '1 + 2'
 <?xml version='1.0' encoding='utf-8'?>
 <expr>
 <int value="3" />

@@ -133,7 +133,7 @@ $ nix-instantiate --eval --xml -E '1 + 2'
 The difference between non-strict and strict evaluation:
 
 ```console
-$ nix-instantiate --eval --xml -E 'rec { x = "foo"; y = x; }'
+$ nix-instantiate --eval --xml --expr 'rec { x = "foo"; y = x; }'
 ...
 <attr name="x">
 <string value="foo" />

@@ -148,7 +148,7 @@ Note that `y` is left unevaluated (the XML representation doesn’t
 attempt to show non-normal forms).
 
 ```console
-$ nix-instantiate --eval --xml --strict -E 'rec { x = "foo"; y = x; }'
+$ nix-instantiate --eval --xml --strict --expr 'rec { x = "foo"; y = x; }'
 ...
 <attr name="x">
 <string value="foo" />

@@ -89,7 +89,7 @@ All options not listed here are passed to `nix-store --realise`,
 - `--packages` / `-p` *packages*…\
 Set up an environment in which the specified packages are present.
 The command line arguments are interpreted as attribute names inside
-the Nix Packages collection. Thus, `nix-shell -p libjpeg openjdk`
+the Nix Packages collection. Thus, `nix-shell --packages libjpeg openjdk`
 will start a shell in which the packages denoted by the attribute
 names `libjpeg` and `openjdk` are present.
 
@@ -118,7 +118,7 @@ To build the dependencies of the package Pan, and start an interactive
 shell in which to build it:
 
 ```console
-$ nix-shell '<nixpkgs>' -A pan
+$ nix-shell '<nixpkgs>' --attr pan
 [nix-shell]$ eval ${unpackPhase:-unpackPhase}
 [nix-shell]$ cd $sourceRoot
 [nix-shell]$ eval ${patchPhase:-patchPhase}

@@ -137,7 +137,7 @@ To clear the environment first, and do some additional automatic
 initialisation of the interactive shell:
 
 ```console
-$ nix-shell '<nixpkgs>' -A pan --pure \
+$ nix-shell '<nixpkgs>' --attr pan --pure \
 --command 'export NIX_DEBUG=1; export NIX_CORES=8; return'
 ```
 
@@ -146,13 +146,13 @@ Nix expressions can also be given on the command line using the `-E` and
 packages `sqlite` and `libX11`:
 
 ```console
-$ nix-shell -E 'with import <nixpkgs> { }; runCommand "dummy" { buildInputs = [ sqlite xorg.libX11 ]; } ""'
+$ nix-shell --expr 'with import <nixpkgs> { }; runCommand "dummy" { buildInputs = [ sqlite xorg.libX11 ]; } ""'
 ```
 
 A shorter way to do the same is:
 
 ```console
-$ nix-shell -p sqlite xorg.libX11
+$ nix-shell --packages sqlite xorg.libX11
 [nix-shell]$ echo $NIX_LDFLAGS
 … -L/nix/store/j1zg5v…-sqlite-3.8.0.2/lib -L/nix/store/0gmcz9…-libX11-1.6.1/lib …
 ```

@@ -162,7 +162,7 @@ the `buildInputs = [ ... ]` shown above, not only package names. So the
 following is also legal:
 
 ```console
-$ nix-shell -p sqlite 'git.override { withManual = false; }'
+$ nix-shell --packages sqlite 'git.override { withManual = false; }'
 ```
 
 The `-p` flag looks up Nixpkgs in the Nix search path. You can override

@@ -171,7 +171,7 @@ gives you a shell containing the Pan package from a specific revision of
 Nixpkgs:
 
 ```console
-$ nix-shell -p pan -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/8a3eea054838b55aca962c3fbde9c83c102b8bf2.tar.gz
+$ nix-shell --packages pan -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/8a3eea054838b55aca962c3fbde9c83c102b8bf2.tar.gz
 
 [nix-shell:~]$ pan --version
 Pan 0.139

@@ -185,7 +185,7 @@ done by starting the script with the following lines:
 
 ```bash
 #! /usr/bin/env nix-shell
-#! nix-shell -i real-interpreter -p packages
+#! nix-shell -i real-interpreter --packages packages
 ```
 
 where *real-interpreter* is the “real” script interpreter that will be

@@ -202,7 +202,7 @@ For example, here is a Python script that depends on Python and the
 
 ```python
 #! /usr/bin/env nix-shell
-#! nix-shell -i python -p python pythonPackages.prettytable
+#! nix-shell -i python --packages python pythonPackages.prettytable
 
 import prettytable
 

@@ -217,7 +217,7 @@ requires Perl and the `HTML::TokeParser::Simple` and `LWP` packages:
 
 ```perl
 #! /usr/bin/env nix-shell
-#! nix-shell -i perl -p perl perlPackages.HTMLTokeParserSimple perlPackages.LWP
+#! nix-shell -i perl --packages perl perlPackages.HTMLTokeParserSimple perlPackages.LWP
|
||||||
|
|
||||||
use HTML::TokeParser::Simple;
|
use HTML::TokeParser::Simple;
|
||||||
|
|
||||||
|
@ -235,7 +235,7 @@ package like Terraform:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
#! /usr/bin/env nix-shell
|
#! /usr/bin/env nix-shell
|
||||||
#! nix-shell -i bash -p "terraform.withPlugins (plugins: [ plugins.openstack ])"
|
#! nix-shell -i bash --packages "terraform.withPlugins (plugins: [ plugins.openstack ])"
|
||||||
|
|
||||||
terraform apply
|
terraform apply
|
||||||
```
|
```
|
||||||
|
@ -251,7 +251,7 @@ branch):
|
||||||
|
|
||||||
```haskell
|
```haskell
|
||||||
#! /usr/bin/env nix-shell
|
#! /usr/bin/env nix-shell
|
||||||
#! nix-shell -i runghc -p "haskellPackages.ghcWithPackages (ps: [ps.download-curl ps.tagsoup])"
|
#! nix-shell -i runghc --packages "haskellPackages.ghcWithPackages (ps: [ps.download-curl ps.tagsoup])"
|
||||||
#! nix-shell -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/nixos-20.03.tar.gz
|
#! nix-shell -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/nixos-20.03.tar.gz
|
||||||
|
|
||||||
import Network.Curl.Download
|
import Network.Curl.Download
|
||||||
|
|
|
@ -23,7 +23,7 @@ produce the same NAR archive. For instance, directory entries are
|
||||||
always sorted so that the actual on-disk order doesn’t influence the
|
always sorted so that the actual on-disk order doesn’t influence the
|
||||||
result. This means that the cryptographic hash of a NAR dump of a
|
result. This means that the cryptographic hash of a NAR dump of a
|
||||||
path is usable as a fingerprint of the contents of the path. Indeed,
|
path is usable as a fingerprint of the contents of the path. Indeed,
|
||||||
the hashes of store paths stored in Nix’s database (see `nix-store -q
|
the hashes of store paths stored in Nix’s database (see `nix-store --query
|
||||||
--hash`) are SHA-256 hashes of the NAR dump of each store path.
|
--hash`) are SHA-256 hashes of the NAR dump of each store path.
|
||||||
|
|
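Since the recorded hash is just the SHA-256 of the NAR serialisation, it can be re-derived from the on-disk contents. A hedged sketch (the store path is hypothetical; the two results should agree, up to the `sha256:` prefix printed by `--query --hash`, only while the path is valid and unmodified):

```console
# hash recorded in the Nix database at registration time
$ nix-store --query --hash /nix/store/…-hello-2.12.1
# SHA-256 of a fresh NAR dump of the same path
$ nix-hash --type sha256 --base32 /nix/store/…-hello-2.12.1
```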
||||||
NAR archives support filenames of unlimited length and 64-bit file
|
NAR archives support filenames of unlimited length and 64-bit file
|
||||||
|
|
|
@ -31,7 +31,7 @@ To copy a whole closure, do something
|
||||||
like:
|
like:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-store --export $(nix-store -qR paths) > out
|
$ nix-store --export $(nix-store --query --requisites paths) > out
|
||||||
```
|
```
|
||||||
|
|
||||||
To import the whole closure again, run:
|
To import the whole closure again, run:
|
||||||
|
|
|
@ -11,7 +11,7 @@ The following options are allowed for all `nix-store` operations, but may not al
|
||||||
be created in `/nix/var/nix/gcroots/auto/`. For instance,
|
be created in `/nix/var/nix/gcroots/auto/`. For instance,
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-store --add-root /home/eelco/bla/result -r ...
|
$ nix-store --add-root /home/eelco/bla/result --realise ...
|
||||||
|
|
||||||
$ ls -l /nix/var/nix/gcroots/auto
|
$ ls -l /nix/var/nix/gcroots/auto
|
||||||
lrwxrwxrwx 1 ... 2005-03-13 21:10 dn54lcypm8f8... -> /home/eelco/bla/result
|
lrwxrwxrwx 1 ... 2005-03-13 21:10 dn54lcypm8f8... -> /home/eelco/bla/result
|
||||||
|
|
|
@ -145,7 +145,7 @@ Print the closure (runtime dependencies) of the `svn` program in the
|
||||||
current user environment:
|
current user environment:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-store -qR $(which svn)
|
$ nix-store --query --requisites $(which svn)
|
||||||
/nix/store/5mbglq5ldqld8sj57273aljwkfvj22mc-subversion-1.1.4
|
/nix/store/5mbglq5ldqld8sj57273aljwkfvj22mc-subversion-1.1.4
|
||||||
/nix/store/9lz9yc6zgmc0vlqmn2ipcpkjlmbi51vv-glibc-2.3.4
|
/nix/store/9lz9yc6zgmc0vlqmn2ipcpkjlmbi51vv-glibc-2.3.4
|
||||||
...
|
...
|
||||||
|
@ -154,7 +154,7 @@ $ nix-store -qR $(which svn)
|
||||||
Print the build-time dependencies of `svn`:
|
Print the build-time dependencies of `svn`:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-store -qR $(nix-store -qd $(which svn))
|
$ nix-store --query --requisites $(nix-store --query --deriver $(which svn))
|
||||||
/nix/store/02iizgn86m42q905rddvg4ja975bk2i4-grep-2.5.1.tar.bz2.drv
|
/nix/store/02iizgn86m42q905rddvg4ja975bk2i4-grep-2.5.1.tar.bz2.drv
|
||||||
/nix/store/07a2bzxmzwz5hp58nf03pahrv2ygwgs3-gcc-wrapper.sh
|
/nix/store/07a2bzxmzwz5hp58nf03pahrv2ygwgs3-gcc-wrapper.sh
|
||||||
/nix/store/0ma7c9wsbaxahwwl04gbw3fcd806ski4-glibc-2.3.4.drv
|
/nix/store/0ma7c9wsbaxahwwl04gbw3fcd806ski4-glibc-2.3.4.drv
|
||||||
|
@ -168,7 +168,7 @@ the derivation (`-qd`), not the closure of the output path that contains
|
||||||
Show the build-time dependencies as a tree:
|
Show the build-time dependencies as a tree:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-store -q --tree $(nix-store -qd $(which svn))
|
$ nix-store --query --tree $(nix-store --query --deriver $(which svn))
|
||||||
/nix/store/7i5082kfb6yjbqdbiwdhhza0am2xvh6c-subversion-1.1.4.drv
|
/nix/store/7i5082kfb6yjbqdbiwdhhza0am2xvh6c-subversion-1.1.4.drv
|
||||||
+---/nix/store/d8afh10z72n8l1cr5w42366abiblgn54-builder.sh
|
+---/nix/store/d8afh10z72n8l1cr5w42366abiblgn54-builder.sh
|
||||||
+---/nix/store/fmzxmpjx2lh849ph0l36snfj9zdibw67-bash-3.0.drv
|
+---/nix/store/fmzxmpjx2lh849ph0l36snfj9zdibw67-bash-3.0.drv
|
||||||
|
@ -180,7 +180,7 @@ $ nix-store -q --tree $(nix-store -qd $(which svn))
|
||||||
Show all paths that depend on the same OpenSSL library as `svn`:
|
Show all paths that depend on the same OpenSSL library as `svn`:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-store -q --referrers $(nix-store -q --binding openssl $(nix-store -qd $(which svn)))
|
$ nix-store --query --referrers $(nix-store --query --binding openssl $(nix-store --query --deriver $(which svn)))
|
||||||
/nix/store/23ny9l9wixx21632y2wi4p585qhva1q8-sylpheed-1.0.0
|
/nix/store/23ny9l9wixx21632y2wi4p585qhva1q8-sylpheed-1.0.0
|
||||||
/nix/store/5mbglq5ldqld8sj57273aljwkfvj22mc-subversion-1.1.4
|
/nix/store/5mbglq5ldqld8sj57273aljwkfvj22mc-subversion-1.1.4
|
||||||
/nix/store/dpmvp969yhdqs7lm2r1a3gng7pyq6vy4-subversion-1.1.3
|
/nix/store/dpmvp969yhdqs7lm2r1a3gng7pyq6vy4-subversion-1.1.3
|
||||||
|
@ -191,7 +191,7 @@ Show all paths that directly or indirectly depend on the Glibc (C
|
||||||
library) used by `svn`:
|
library) used by `svn`:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-store -q --referrers-closure $(ldd $(which svn) | grep /libc.so | awk '{print $3}')
|
$ nix-store --query --referrers-closure $(ldd $(which svn) | grep /libc.so | awk '{print $3}')
|
||||||
/nix/store/034a6h4vpz9kds5r6kzb9lhh81mscw43-libgnomeprintui-2.8.2
|
/nix/store/034a6h4vpz9kds5r6kzb9lhh81mscw43-libgnomeprintui-2.8.2
|
||||||
/nix/store/15l3yi0d45prm7a82pcrknxdh6nzmxza-gawk-3.1.4
|
/nix/store/15l3yi0d45prm7a82pcrknxdh6nzmxza-gawk-3.1.4
|
||||||
...
|
...
|
||||||
|
@ -204,7 +204,7 @@ Make a picture of the runtime dependency graph of the current user
|
||||||
environment:
|
environment:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-store -q --graph ~/.nix-profile | dot -Tps > graph.ps
|
$ nix-store --query --graph ~/.nix-profile | dot -Tps > graph.ps
|
||||||
$ gv graph.ps
|
$ gv graph.ps
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -212,7 +212,7 @@ Show every garbage collector root that points to a store path that
|
||||||
depends on `svn`:
|
depends on `svn`:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-store -q --roots $(which svn)
|
$ nix-store --query --roots $(which svn)
|
||||||
/nix/var/nix/profiles/default-81-link
|
/nix/var/nix/profiles/default-81-link
|
||||||
/nix/var/nix/profiles/default-82-link
|
/nix/var/nix/profiles/default-82-link
|
||||||
/home/eelco/.local/state/nix/profiles/profile-97-link
|
/home/eelco/.local/state/nix/profiles/profile-97-link
|
||||||
|
|
|
@ -27,7 +27,7 @@ substitute, then the log is unavailable.
|
||||||
# Example
|
# Example
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-store -l $(which ktorrent)
|
$ nix-store --read-log $(which ktorrent)
|
||||||
building /nix/store/dhc73pvzpnzxhdgpimsd9sw39di66ph1-ktorrent-2.2.1
|
building /nix/store/dhc73pvzpnzxhdgpimsd9sw39di66ph1-ktorrent-2.2.1
|
||||||
unpacking sources
|
unpacking sources
|
||||||
unpacking source archive /nix/store/p8n1jpqs27mgkjw07pb5269717nzf5f8-ktorrent-2.2.1.tar.gz
|
unpacking source archive /nix/store/p8n1jpqs27mgkjw07pb5269717nzf5f8-ktorrent-2.2.1.tar.gz
|
||||||
|
|
|
@ -99,7 +99,7 @@ This operation is typically used to build [store derivation]s produced by
|
||||||
[store derivation]: @docroot@/glossary.md#gloss-store-derivation
|
[store derivation]: @docroot@/glossary.md#gloss-store-derivation
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-store -r $(nix-instantiate ./test.nix)
|
$ nix-store --realise $(nix-instantiate ./test.nix)
|
||||||
/nix/store/31axcgrlbfsxzmfff1gyj1bf62hvkby2-aterm-2.3.1
|
/nix/store/31axcgrlbfsxzmfff1gyj1bf62hvkby2-aterm-2.3.1
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -108,7 +108,7 @@ This is essentially what [`nix-build`](@docroot@/command-ref/nix-build.md) does.
|
||||||
To test whether a previously-built derivation is deterministic:
|
To test whether a previously-built derivation is deterministic:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-build '<nixpkgs>' -A hello --check -K
|
$ nix-build '<nixpkgs>' --attr hello --check -K
|
||||||
```
|
```
|
||||||
|
|
||||||
Use [`nix-store --read-log`](./read-log.md) to show the stderr and stdout of a build:
|
Use [`nix-store --read-log`](./read-log.md) to show the stderr and stdout of a build:
|
||||||
|
|
|
@ -24,6 +24,6 @@ path has changed, and 1 otherwise.
|
||||||
To verify the integrity of the `svn` command and all its dependencies:
|
To verify the integrity of the `svn` command and all its dependencies:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-store --verify-path $(nix-store -qR $(which svn))
|
$ nix-store --verify-path $(nix-store --query --requisites $(which svn))
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
@ -162,11 +162,11 @@ Most Nix commands accept the following command-line options:
|
||||||
}: ...
|
}: ...
|
||||||
```
|
```
|
||||||
|
|
||||||
So if you call this Nix expression (e.g., when you do `nix-env -iA
|
So if you call this Nix expression (e.g., when you do `nix-env --install --attr
|
||||||
pkgname`), the function will be called automatically using the
|
pkgname`), the function will be called automatically using the
|
||||||
value [`builtins.currentSystem`](@docroot@/language/builtins.md) for
|
value [`builtins.currentSystem`](@docroot@/language/builtins.md) for
|
||||||
the `system` argument. You can override this using `--arg`, e.g.,
|
the `system` argument. You can override this using `--arg`, e.g.,
|
||||||
`nix-env -iA pkgname --arg system \"i686-freebsd\"`. (Note that
|
`nix-env --install --attr pkgname --arg system \"i686-freebsd\"`. (Note that
|
||||||
since the argument is a Nix string literal, you have to escape the
|
since the argument is a Nix string literal, you have to escape the
|
||||||
quotes.)
|
quotes.)
|
||||||
|
|
||||||
|
@ -199,7 +199,7 @@ Most Nix commands accept the following command-line options:
|
||||||
For `nix-shell`, this option is commonly used to give you a shell in
|
For `nix-shell`, this option is commonly used to give you a shell in
|
||||||
which you can build the packages returned by the expression. If you
|
which you can build the packages returned by the expression. If you
|
||||||
want to get a shell which contains the *built* packages ready for
|
want to get a shell which contains the *built* packages ready for
|
||||||
use, give your expression to the `nix-shell -p` convenience flag
|
use, give your expression to the `nix-shell --packages` convenience flag
|
||||||
instead.
|
instead.
|
||||||
|
|
||||||
- <span id="opt-I">[`-I`](#opt-I)</span> *path*\
|
- <span id="opt-I">[`-I`](#opt-I)</span> *path*\
|
||||||
|
|
|
@ -77,7 +77,7 @@ $ nix-shell
|
||||||
To get a shell with one of the other [supported compilation environments](#compilation-environments):
|
To get a shell with one of the other [supported compilation environments](#compilation-environments):
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-shell -A devShells.x86_64-linux.native-clang11StdenvPackages
|
$ nix-shell --attr devShells.x86_64-linux.native-clang11StdenvPackages
|
||||||
```
|
```
|
||||||
|
|
||||||
> **Note**
|
> **Note**
|
||||||
|
@ -139,7 +139,7 @@ $ nix build .#packages.aarch64-linux.default
|
||||||
for flake-enabled Nix, or
|
for flake-enabled Nix, or
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-build -A packages.aarch64-linux.default
|
$ nix-build --attr packages.aarch64-linux.default
|
||||||
```
|
```
|
||||||
|
|
||||||
for classic Nix.
|
for classic Nix.
|
||||||
|
@ -166,7 +166,7 @@ $ nix build .#nix-ccacheStdenv
|
||||||
for flake-enabled Nix, or
|
for flake-enabled Nix, or
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-build -A nix-ccacheStdenv
|
$ nix-build --attr nix-ccacheStdenv
|
||||||
```
|
```
|
||||||
|
|
||||||
for classic Nix.
|
for classic Nix.
|
||||||
|
|
|
@ -101,11 +101,8 @@
|
||||||
derivation.
|
derivation.
|
||||||
|
|
||||||
- [output-addressed store object]{#gloss-output-addressed-store-object}\
|
- [output-addressed store object]{#gloss-output-addressed-store-object}\
|
||||||
A store object whose store path hashes its content. This
|
A [store object] whose [store path] is determined by its contents.
|
||||||
includes derivations, the outputs of
|
This includes derivations, the outputs of [content-addressed derivations](#gloss-content-addressed-derivation), and the outputs of [fixed-output derivations](#gloss-fixed-output-derivation).
|
||||||
[content-addressed derivations](#gloss-content-addressed-derivation),
|
|
||||||
and the outputs of
|
|
||||||
[fixed-output derivations](#gloss-fixed-output-derivation).
|
|
||||||
|
|
||||||
- [substitute]{#gloss-substitute}\
|
- [substitute]{#gloss-substitute}\
|
||||||
A substitute is a command invocation stored in the [Nix database] that
|
A substitute is a command invocation stored in the [Nix database] that
|
||||||
|
@ -163,7 +160,7 @@
|
||||||
build-time dependencies, while the closure of its output path is
|
build-time dependencies, while the closure of its output path is
|
||||||
equivalent to its runtime dependencies. For correct deployment it
|
equivalent to its runtime dependencies. For correct deployment it
|
||||||
is necessary to deploy whole closures, since otherwise at runtime
|
is necessary to deploy whole closures, since otherwise at runtime
|
||||||
files could be missing. The command `nix-store -qR` prints out
|
files could be missing. The command `nix-store --query --requisites` prints out
|
||||||
closures of store paths.
|
closures of store paths.
|
||||||
|
|
||||||
As an example, if the [store object] at path `P` contains a [reference]
|
As an example, if the [store object] at path `P` contains a [reference]
|
||||||
|
|
|
@ -24,23 +24,10 @@ If you are on Linux with systemd:
|
||||||
sudo systemctl daemon-reload
|
sudo systemctl daemon-reload
|
||||||
```
|
```
|
||||||
|
|
||||||
1. Remove systemd service files:
|
|
||||||
|
|
||||||
```console
|
|
||||||
sudo rm /etc/systemd/system/nix-daemon.service /etc/systemd/system/nix-daemon.socket
|
|
||||||
```
|
|
||||||
|
|
||||||
1. The installer script uses systemd-tmpfiles to create the socket directory.
|
|
||||||
You may also want to remove the configuration for that:
|
|
||||||
|
|
||||||
```console
|
|
||||||
sudo rm /etc/tmpfiles.d/nix-daemon.conf
|
|
||||||
```
|
|
||||||
|
|
||||||
Remove files created by Nix:
|
Remove files created by Nix:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
sudo rm -rf /nix /etc/nix /etc/profile/nix.sh ~root/.nix-profile ~root/.nix-defexpr ~root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels
|
sudo rm -rf /etc/nix /etc/profile.d/nix.sh /etc/tmpfiles.d/nix-daemon.conf /nix ~root/.nix-channels ~root/.nix-defexpr ~root/.nix-profile
|
||||||
```
|
```
|
||||||
|
|
||||||
Remove build users and their group:
|
Remove build users and their group:
|
||||||
|
@ -54,8 +41,10 @@ sudo groupdel nixbld
|
||||||
|
|
||||||
There may also be references to Nix in
|
There may also be references to Nix in
|
||||||
|
|
||||||
- `/etc/profile`
|
- `/etc/bash.bashrc`
|
||||||
- `/etc/bashrc`
|
- `/etc/bashrc`
|
||||||
|
- `/etc/profile`
|
||||||
|
- `/etc/zsh/zshrc`
|
||||||
- `/etc/zshrc`
|
- `/etc/zshrc`
|
||||||
|
|
||||||
which you may remove.
|
which you may remove.
|
||||||
|
|
|
@ -2,13 +2,13 @@
|
||||||
|
|
||||||
Multi-user Nix users on macOS can upgrade Nix by running: `sudo -i sh -c
|
Multi-user Nix users on macOS can upgrade Nix by running: `sudo -i sh -c
|
||||||
'nix-channel --update &&
|
'nix-channel --update &&
|
||||||
nix-env -iA nixpkgs.nix &&
|
nix-env --install --attr nixpkgs.nix &&
|
||||||
launchctl remove org.nixos.nix-daemon &&
|
launchctl remove org.nixos.nix-daemon &&
|
||||||
launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist'`
|
launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist'`
|
||||||
|
|
||||||
Single-user installations of Nix should run this: `nix-channel --update;
|
Single-user installations of Nix should run this: `nix-channel --update;
|
||||||
nix-env -iA nixpkgs.nix nixpkgs.cacert`
|
nix-env --install --attr nixpkgs.nix nixpkgs.cacert`
|
||||||
|
|
||||||
Multi-user Nix users on Linux should run this with sudo: `nix-channel
|
Multi-user Nix users on Linux should run this with sudo: `nix-channel
|
||||||
--update; nix-env -iA nixpkgs.nix nixpkgs.cacert; systemctl
|
--update; nix-env --install --attr nixpkgs.nix nixpkgs.cacert; systemctl
|
||||||
daemon-reload; systemctl restart nix-daemon`
|
daemon-reload; systemctl restart nix-daemon`
|
||||||
|
|
|
@ -76,7 +76,7 @@ there after an upgrade. This means that you can _roll back_ to the
|
||||||
old version:
|
old version:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env --upgrade -A nixpkgs.some-package
|
$ nix-env --upgrade --attr nixpkgs.some-package
|
||||||
$ nix-env --rollback
|
$ nix-env --rollback
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -122,7 +122,7 @@ Nix expressions generally describe how to build a package from
|
||||||
source, so an installation action like
|
source, so an installation action like
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env --install -A nixpkgs.firefox
|
$ nix-env --install --attr nixpkgs.firefox
|
||||||
```
|
```
|
||||||
|
|
||||||
_could_ cause quite a bit of build activity, as not only Firefox but
|
_could_ cause quite a bit of build activity, as not only Firefox but
|
||||||
|
@ -158,7 +158,7 @@ Pan newsreader, as described by [its
|
||||||
Nix expression](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/networking/newsreaders/pan/default.nix):
|
Nix expression](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/networking/newsreaders/pan/default.nix):
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-shell '<nixpkgs>' -A pan
|
$ nix-shell '<nixpkgs>' --attr pan
|
||||||
```
|
```
|
||||||
|
|
||||||
You’re then dropped into a shell where you can edit, build and test
|
You’re then dropped into a shell where you can edit, build and test
|
||||||
|
|
|
@ -1,20 +1,19 @@
|
||||||
# Built-in Constants
|
# Built-in Constants
|
||||||
|
|
||||||
Here are the constants built into the Nix expression evaluator:
|
These constants are built into the Nix language evaluator:
|
||||||
|
|
||||||
- `builtins`\
|
- [`builtins`]{#builtins-builtins} (attribute set)
|
||||||
The set `builtins` contains all the built-in functions and values.
|
|
||||||
You can use `builtins` to test for the availability of features in
|
|
||||||
the Nix installation, e.g.,
|
|
||||||
|
|
||||||
```nix
|
|
||||||
if builtins ? getEnv then builtins.getEnv "PATH" else ""
|
|
||||||
```
|
|
||||||
|
|
||||||
This allows a Nix expression to fall back gracefully on older Nix
|
|
||||||
installations that don’t have the desired built-in function.
|
|
||||||
|
|
||||||
- [`builtins.currentSystem`]{#builtins-currentSystem}\
|
Contains all the [built-in functions](./builtins.md) and values, in order to avoid polluting the global scope.
|
||||||
The built-in value `currentSystem` evaluates to the Nix platform
|
|
||||||
identifier for the Nix installation on which the expression is being
|
Since built-in functions were added over time, [testing for attributes](./operators.md#has-attribute) in `builtins` can be used for graceful fallback on older Nix installations:
|
||||||
evaluated, such as `"i686-linux"` or `"x86_64-darwin"`.
|
|
||||||
|
```nix
|
||||||
|
if builtins ? getEnv then builtins.getEnv "PATH" else ""
|
||||||
|
```
|
||||||
|
|
||||||
|
- [`builtins.currentSystem`]{#builtins-currentSystem} (string)
|
||||||
|
|
||||||
|
The built-in value `currentSystem` evaluates to the Nix platform
|
||||||
|
identifier for the Nix installation on which the expression is being
|
||||||
|
evaluated, such as `"i686-linux"` or `"x86_64-darwin"`.
|
||||||
|
|
|
@ -1,16 +1,16 @@
|
||||||
# Built-in Functions
|
# Built-in Functions
|
||||||
|
|
||||||
This section lists the functions built into the Nix expression
|
This section lists the functions built into the Nix language evaluator.
|
||||||
evaluator. (The built-in function `derivation` is discussed above.)
|
All built-in functions are available through the global [`builtins`](./builtin-constants.md#builtins-builtins) constant.
|
||||||
Some built-ins, such as `derivation`, are always in scope of every Nix
|
|
||||||
expression; you can just access them right away. But to prevent
|
For convenience, some built-ins can be accessed directly (see the example after this list):
|
||||||
polluting the namespace too much, most built-ins are not in
|
|
||||||
scope. Instead, you can access them through the `builtins` built-in
|
- [`derivation`](#builtins-derivation)
|
||||||
value, which is a set that contains all built-in functions and values.
|
- [`import`](#builtins-import)
|
||||||
For instance, `derivation` is also available as `builtins.derivation`.
|
- [`abort`](#builtins-abort)
|
||||||
|
- [`throw`](#builtins-throw)
|
||||||
|
|
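A minimal sketch of the distinction (it can be evaluated with `nix-instantiate --eval --expr`, for example): `stringLength` is only reachable through `builtins`, while `throw` is also in the global scope.

```nix
# `stringLength` needs the `builtins.` prefix; `throw` does not.
if builtins.stringLength "nix" == 3
then "ok"
else throw "unexpected length"
```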
||||||
<dl>
|
<dl>
|
||||||
<dt><code>derivation <var>attrs</var></code>;
|
<dt id="builtins-derivation"><a href="#builtins-derivation"><code>derivation <var>attrs</var></code></a></dt>
|
||||||
<code>builtins.derivation <var>attrs</var></code></dt>
|
|
||||||
<dd><p><var>derivation</var> is described in
|
<dd><p><var>derivation</var> is described in
|
||||||
<a href="derivations.md">its own section</a>.</p></dd>
|
<a href="derivations.md">its own section</a>.</p></dd>
|
||||||
|
|
|
@ -36,7 +36,7 @@
|
||||||
## Attribute selection
|
## Attribute selection
|
||||||
|
|
||||||
Select the attribute denoted by attribute path *attrpath* from [attribute set] *attrset*.
|
Select the attribute denoted by attribute path *attrpath* from [attribute set] *attrset*.
|
||||||
If the attribute doesn’t exist, return *value* if provided, otherwise abort evaluation.
|
If the attribute doesn’t exist, return the *expr* after `or` if provided, otherwise abort evaluation.
|
||||||
|
|
||||||
<!-- FIXME: the following should to into its own language syntax section, but that needs more work to fit in well -->
|
<!-- FIXME: the following should to into its own language syntax section, but that needs more work to fit in well -->
|
||||||
|
|
||||||
|
|
|
@ -190,13 +190,17 @@ instance,
|
||||||
```
|
```
|
||||||
|
|
||||||
evaluates to `"Foo"`. It is possible to provide a default value in an
|
evaluates to `"Foo"`. It is possible to provide a default value in an
|
||||||
attribute selection using the `or` keyword. For example,
|
attribute selection using the `or` keyword:
|
||||||
|
|
||||||
```nix
|
```nix
|
||||||
{ a = "Foo"; b = "Bar"; }.c or "Xyzzy"
|
{ a = "Foo"; b = "Bar"; }.c or "Xyzzy"
|
||||||
```
|
```
|
||||||
|
|
||||||
will evaluate to `"Xyzzy"` because there is no `c` attribute in the set.
|
```nix
|
||||||
|
{ a = "Foo"; b = "Bar"; }.c.d.e.f.g or "Xyzzy"
|
||||||
|
```
|
||||||
|
|
||||||
|
will both evaluate to `"Xyzzy"` because there is no `c` attribute in the set.
|
||||||
|
|
||||||
You can use arbitrary double-quoted strings as attribute names:
|
You can use arbitrary double-quoted strings as attribute names:
|
||||||
|
|
||||||
|
|
|
@ -47,7 +47,7 @@ $ nix-channel --update
|
||||||
You can view the set of available packages in Nixpkgs:
|
You can view the set of available packages in Nixpkgs:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -qaP
|
$ nix-env --query --available --attr-path
|
||||||
nixpkgs.aterm aterm-2.2
|
nixpkgs.aterm aterm-2.2
|
||||||
nixpkgs.bash bash-3.0
|
nixpkgs.bash bash-3.0
|
||||||
nixpkgs.binutils binutils-2.15
|
nixpkgs.binutils binutils-2.15
|
||||||
|
@ -65,7 +65,7 @@ If you downloaded Nixpkgs yourself, or if you checked it out from GitHub,
|
||||||
then you need to pass the path to your Nixpkgs tree using the `-f` flag:
|
then you need to pass the path to your Nixpkgs tree using the `-f` flag:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -qaPf /path/to/nixpkgs
|
$ nix-env --query --available --attr-path --file /path/to/nixpkgs
|
||||||
aterm aterm-2.2
|
aterm aterm-2.2
|
||||||
bash bash-3.0
|
bash bash-3.0
|
||||||
…
|
…
|
||||||
|
@ -77,7 +77,7 @@ Nixpkgs.
|
||||||
You can filter the packages by name:
|
You can filter the packages by name:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -qaP firefox
|
$ nix-env --query --available --attr-path firefox
|
||||||
nixpkgs.firefox-esr firefox-91.3.0esr
|
nixpkgs.firefox-esr firefox-91.3.0esr
|
||||||
nixpkgs.firefox firefox-94.0.1
|
nixpkgs.firefox firefox-94.0.1
|
||||||
```
|
```
|
||||||
|
@ -85,7 +85,7 @@ nixpkgs.firefox firefox-94.0.1
|
||||||
and using regular expressions:
|
and using regular expressions:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -qaP 'firefox.*'
|
$ nix-env --query --available --attr-path 'firefox.*'
|
||||||
```
|
```
|
||||||
|
|
||||||
It is also possible to see the *status* of available packages, i.e.,
|
It is also possible to see the *status* of available packages, i.e.,
|
||||||
|
@ -93,7 +93,7 @@ whether they are installed into the user environment and/or present in
|
||||||
the system:
|
the system:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -qaPs
|
$ nix-env --query --available --attr-path --status
|
||||||
…
|
…
|
||||||
-PS nixpkgs.bash bash-3.0
|
-PS nixpkgs.bash bash-3.0
|
||||||
--S nixpkgs.binutils binutils-2.15
|
--S nixpkgs.binutils binutils-2.15
|
||||||
|
@ -110,10 +110,10 @@ which is Nix’s mechanism for doing binary deployment. It just means that
|
||||||
Nix knows that it can fetch a pre-built package from somewhere
|
Nix knows that it can fetch a pre-built package from somewhere
|
||||||
(typically a network server) instead of building it locally.
|
(typically a network server) instead of building it locally.
|
||||||
|
|
||||||
You can install a package using `nix-env -iA`. For instance,
|
You can install a package using `nix-env --install --attr`. For instance,
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -iA nixpkgs.subversion
|
$ nix-env --install --attr nixpkgs.subversion
|
||||||
```
|
```
|
||||||
|
|
||||||
will install the package called `subversion` from `nixpkgs` channel (which is, of course, the
|
will install the package called `subversion` from `nixpkgs` channel (which is, of course, the
|
||||||
|
@ -143,14 +143,14 @@ instead of the attribute path, as `nix-env` does not record which attribute
|
||||||
was used for installing:
|
was used for installing:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -e subversion
|
$ nix-env --uninstall subversion
|
||||||
```
|
```
|
||||||
|
|
||||||
Upgrading to a new version is just as easy. If you have a new release of
|
Upgrading to a new version is just as easy. If you have a new release of
|
||||||
Nix Packages, you can do:
|
Nix Packages, you can do:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -uA nixpkgs.subversion
|
$ nix-env --upgrade --attr nixpkgs.subversion
|
||||||
```
|
```
|
||||||
|
|
||||||
This will *only* upgrade Subversion if there is a “newer” version in the
|
This will *only* upgrade Subversion if there is a “newer” version in the
|
||||||
|
@ -163,15 +163,15 @@ whatever version is in the Nix expressions, use `-i` instead of `-u`;
|
||||||
You can also upgrade all packages for which there are newer versions:
|
You can also upgrade all packages for which there are newer versions:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -u
|
$ nix-env --upgrade
|
||||||
```
|
```
|
||||||
|
|
||||||
Sometimes it’s useful to be able to ask what `nix-env` would do, without
|
Sometimes it’s useful to be able to ask what `nix-env` would do, without
|
||||||
actually doing it. For instance, to find out what packages would be
|
actually doing it. For instance, to find out what packages would be
|
||||||
upgraded by `nix-env -u`, you can do
|
upgraded by `nix-env --upgrade`, you can do
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -u --dry-run
|
$ nix-env --upgrade --dry-run
|
||||||
(dry run; not doing anything)
|
(dry run; not doing anything)
|
||||||
upgrading `libxslt-1.1.0' to `libxslt-1.1.10'
|
upgrading `libxslt-1.1.0' to `libxslt-1.1.10'
|
||||||
upgrading `graphviz-1.10' to `graphviz-1.12'
|
upgrading `graphviz-1.10' to `graphviz-1.12'
|
||||||
|
|
|
@ -9,7 +9,7 @@ The daemon that handles binary cache requests via HTTP, `nix-serve`, is
|
||||||
not part of the Nix distribution, but you can install it from Nixpkgs:
|
not part of the Nix distribution, but you can install it from Nixpkgs:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -iA nixpkgs.nix-serve
|
$ nix-env --install --attr nixpkgs.nix-serve
|
||||||
```
|
```
|
||||||
|
|
||||||
You can then start the server, listening for HTTP connections on
|
You can then start the server, listening for HTTP connections on
|
||||||
|
@ -35,7 +35,7 @@ On the client side, you can tell Nix to use your binary cache using
|
||||||
`--substituters`, e.g.:
|
`--substituters`, e.g.:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -iA nixpkgs.firefox --substituters http://avalon:8080/
|
$ nix-env --install --attr nixpkgs.firefox --substituters http://avalon:8080/
|
||||||
```
|
```
|
||||||
|
|
||||||
The option `substituters` tells Nix to use this binary cache in
|
The option `substituters` tells Nix to use this binary cache in
|
||||||
|
|
|
@ -43,7 +43,7 @@ operations (via the symlink `~/.nix-defexpr/channels`). Consequently,
|
||||||
you can then say
|
you can then say
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -u
|
$ nix-env --upgrade
|
||||||
```
|
```
|
||||||
|
|
||||||
to upgrade all packages in your profile to the latest versions available
|
to upgrade all packages in your profile to the latest versions available
|
||||||
|
|
|
@ -15,7 +15,7 @@ With `nix-store
|
||||||
path (that is, the path and all its dependencies) to a file, and then
|
path (that is, the path and all its dependencies) to a file, and then
|
||||||
unpack that file into another Nix store. For example,
|
unpack that file into another Nix store. For example,
|
||||||
|
|
||||||
$ nix-store --export $(nix-store -qR $(type -p firefox)) > firefox.closure
|
$ nix-store --export $(nix-store --query --requisites $(type -p firefox)) > firefox.closure
|
||||||
|
|
||||||
writes the closure of Firefox to a file. You can then copy this file to
|
writes the closure of Firefox to a file. You can then copy this file to
|
||||||
another machine and install the closure:
|
another machine and install the closure:
|
||||||
|
@ -27,7 +27,7 @@ store are ignored. It is also possible to pipe the export into another
|
||||||
command, e.g. to copy and install a closure directly to/on another
|
command, e.g. to copy and install a closure directly to/on another
|
||||||
machine:
|
machine:
|
||||||
|
|
||||||
$ nix-store --export $(nix-store -qR $(type -p firefox)) | bzip2 | \
|
$ nix-store --export $(nix-store --query --requisites $(type -p firefox)) | bzip2 | \
|
||||||
ssh alice@itchy.example.org "bunzip2 | nix-store --import"
|
ssh alice@itchy.example.org "bunzip2 | nix-store --import"
|
||||||
|
|
||||||
However, `nix-copy-closure` is generally more efficient because it only
|
However, `nix-copy-closure` is generally more efficient because it only
|
||||||
|
|
|
@ -39,7 +39,7 @@ just Subversion 1.1.2 (arrows in the figure indicate symlinks). This
|
||||||
would be what we would obtain if we had done
|
would be what we would obtain if we had done
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -iA nixpkgs.subversion
|
$ nix-env --install --attr nixpkgs.subversion
|
||||||
```
|
```
|
||||||
|
|
||||||
on a set of Nix expressions that contained Subversion 1.1.2.
|
on a set of Nix expressions that contained Subversion 1.1.2.
|
||||||
|
@ -54,7 +54,7 @@ environment is generated based on the current one. For instance,
|
||||||
generation 43 was created from generation 42 when we did
|
generation 43 was created from generation 42 when we did
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -iA nixpkgs.subversion nixpkgs.firefox
|
$ nix-env --install --attr nixpkgs.subversion nixpkgs.firefox
|
||||||
```
|
```
|
||||||
|
|
||||||
on a set of Nix expressions that contained Firefox and a new version of
|
on a set of Nix expressions that contained Firefox and a new version of
|
||||||
|
@ -127,7 +127,7 @@ All `nix-env` operations work on the profile pointed to by
|
||||||
(abbreviation `-p`):
|
(abbreviation `-p`):
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -p /nix/var/nix/profiles/other-profile -iA nixpkgs.subversion
|
$ nix-env --profile /nix/var/nix/profiles/other-profile --install --attr nixpkgs.subversion
|
||||||
```
|
```
|
||||||
|
|
||||||
This will *not* change the `~/.nix-profile` symlink.
|
This will *not* change the `~/.nix-profile` symlink.
|
||||||
|
|
|
@ -6,7 +6,7 @@ automatically fetching any store paths in Firefox’s closure if they are
|
||||||
available on the server `avalon`:
|
available on the server `avalon`:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -iA nixpkgs.firefox --substituters ssh://alice@avalon
|
$ nix-env --install --attr nixpkgs.firefox --substituters ssh://alice@avalon
|
||||||
```
|
```
|
||||||
|
|
||||||
This works similarly to the binary cache substituter that Nix usually
|
This works similarly to the binary cache substituter that Nix usually
|
||||||
|
@ -25,7 +25,7 @@ You can also copy the closure of some store path, without installing it
|
||||||
into your profile, e.g.
|
into your profile, e.g.
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-store -r /nix/store/m85bxg…-firefox-34.0.5 --substituters
|
$ nix-store --realise /nix/store/m85bxg…-firefox-34.0.5 --substituters
|
||||||
ssh://alice@avalon
|
ssh://alice@avalon
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
@ -1,2 +1,6 @@
|
||||||
# Release X.Y (202?-??-??)
|
# Release X.Y (202?-??-??)
|
||||||
|
|
||||||
|
- Speed-up of downloads from binary caches.
|
||||||
|
The number of parallel downloads (also known as substitutions) has been separated from the [`--max-jobs` setting](../command-ref/conf-file.md#conf-max-jobs).
|
||||||
|
The new setting is called [`max-substitution-jobs`](../command-ref/conf-file.md#conf-max-substitution-jobs).
|
||||||
|
The number of parallel downloads is now set to 16 by default (previously, the default was 1 due to the coupling to build jobs).
|
||||||
|
|
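As a hedged example, the new setting can be raised for a single invocation with the generic `--option` flag (the value shown is purely illustrative, not a recommendation):

```console
$ nix-build '<nixpkgs>' --attr hello --option max-substitution-jobs 32
```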
14
docker.nix
14
docker.nix
|
@ -190,6 +190,12 @@ let
|
||||||
cp -a ${rootEnv}/* $out/
|
cp -a ${rootEnv}/* $out/
|
||||||
ln -s ${manifest} $out/manifest.nix
|
ln -s ${manifest} $out/manifest.nix
|
||||||
'';
|
'';
|
||||||
|
flake-registry-path = if (flake-registry == null) then
|
||||||
|
null
|
||||||
|
else if (builtins.readFileType (toString flake-registry)) == "directory" then
|
||||||
|
"${flake-registry}/flake-registry.json"
|
||||||
|
else
|
||||||
|
flake-registry;
|
||||||
in
|
in
|
||||||
pkgs.runCommand "base-system"
|
pkgs.runCommand "base-system"
|
||||||
{
|
{
|
||||||
|
@ -202,7 +208,7 @@ let
|
||||||
];
|
];
|
||||||
allowSubstitutes = false;
|
allowSubstitutes = false;
|
||||||
preferLocalBuild = true;
|
preferLocalBuild = true;
|
||||||
} ''
|
} (''
|
||||||
env
|
env
|
||||||
set -x
|
set -x
|
||||||
mkdir -p $out/etc
|
mkdir -p $out/etc
|
||||||
|
@ -249,15 +255,15 @@ let
|
||||||
ln -s ${pkgs.coreutils}/bin/env $out/usr/bin/env
|
ln -s ${pkgs.coreutils}/bin/env $out/usr/bin/env
|
||||||
ln -s ${pkgs.bashInteractive}/bin/bash $out/bin/sh
|
ln -s ${pkgs.bashInteractive}/bin/bash $out/bin/sh
|
||||||
|
|
||||||
'' + (lib.optionalString (flake-registry != null) ''
|
'' + (lib.optionalString (flake-registry-path != null) ''
|
||||||
nixCacheDir="/root/.cache/nix"
|
nixCacheDir="/root/.cache/nix"
|
||||||
mkdir -p $out$nixCacheDir
|
mkdir -p $out$nixCacheDir
|
||||||
globalFlakeRegistryPath="$nixCacheDir/flake-registry.json"
|
globalFlakeRegistryPath="$nixCacheDir/flake-registry.json"
|
||||||
ln -s ${flake-registry}/flake-registry.json $out$globalFlakeRegistryPath
|
ln -s ${flake-registry-path} $out$globalFlakeRegistryPath
|
||||||
mkdir -p $out/nix/var/nix/gcroots/auto
|
mkdir -p $out/nix/var/nix/gcroots/auto
|
||||||
rootName=$(${pkgs.nix}/bin/nix --extra-experimental-features nix-command hash file --type sha1 --base32 <(echo -n $globalFlakeRegistryPath))
|
rootName=$(${pkgs.nix}/bin/nix --extra-experimental-features nix-command hash file --type sha1 --base32 <(echo -n $globalFlakeRegistryPath))
|
||||||
ln -s $globalFlakeRegistryPath $out/nix/var/nix/gcroots/auto/$rootName
|
ln -s $globalFlakeRegistryPath $out/nix/var/nix/gcroots/auto/$rootName
|
||||||
'');
|
''));
|
||||||
|
|
||||||
in
|
in
|
||||||
pkgs.dockerTools.buildLayeredImageWithNixDb {
|
pkgs.dockerTools.buildLayeredImageWithNixDb {
|
||||||
|
|
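A hedged sketch of what the new `flake-registry-path` logic permits: the `flake-registry` argument may now point at either a directory containing `flake-registry.json` or at the JSON file itself. The local paths below are hypothetical, and the other arguments of `docker.nix` are assumed to keep their defaults.

```nix
import ./docker.nix {
  # a directory: flake-registry.json is looked up inside it
  flake-registry = ./my-registry;
  # or, equivalently, a single file:
  # flake-registry = ./my-registry/flake-registry.json;
}
```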
5
mk/cxx-big-literal.mk
Normal file
5
mk/cxx-big-literal.mk
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
%.gen.hh: %
|
||||||
|
@echo 'R"foo(' >> $@.tmp
|
||||||
|
$(trace-gen) cat $< >> $@.tmp
|
||||||
|
@echo ')foo"' >> $@.tmp
|
||||||
|
@mv $@.tmp $@
|
|
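The new `%.gen.hh` rule wraps an arbitrary file in a C++ raw string literal. A hedged sketch of how such a generated header might be consumed (the input file name `example.md` is hypothetical):

```cpp
#include <string>

// The preprocessor splices in the raw string literal produced by the
// %.gen.hh rule, so the file's contents end up embedded in the binary.
static const std::string exampleText =
    #include "example.md.gen.hh"
    ;
```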
@ -101,6 +101,7 @@ include mk/libraries.mk
|
||||||
include mk/programs.mk
|
include mk/programs.mk
|
||||||
include mk/patterns.mk
|
include mk/patterns.mk
|
||||||
include mk/templates.mk
|
include mk/templates.mk
|
||||||
|
include mk/cxx-big-literal.mk
|
||||||
include mk/tests.mk
|
include mk/tests.mk
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -258,6 +258,8 @@ static int main_build_remote(int argc, char * * argv)
|
||||||
connected:
|
connected:
|
||||||
close(5);
|
close(5);
|
||||||
|
|
||||||
|
assert(sshStore);
|
||||||
|
|
||||||
std::cerr << "# accept\n" << storeUri << "\n";
|
std::cerr << "# accept\n" << storeUri << "\n";
|
||||||
|
|
||||||
auto inputs = readStrings<PathSet>(source);
|
auto inputs = readStrings<PathSet>(source);
|
||||||
|
@ -286,23 +288,48 @@ connected:
|
||||||
uploadLock = -1;
|
uploadLock = -1;
|
||||||
|
|
||||||
auto drv = store->readDerivation(*drvPath);
|
auto drv = store->readDerivation(*drvPath);
|
||||||
|
|
||||||
|
std::optional<BuildResult> optResult;
|
||||||
|
|
||||||
|
// If we don't know whether we are trusted (e.g. `ssh://`
|
||||||
|
// stores), we assume we are. This is necessary for backwards
|
||||||
|
// compat.
|
||||||
|
bool trustedOrLegacy = ({
|
||||||
|
std::optional trusted = sshStore->isTrustedClient();
|
||||||
|
!trusted || *trusted;
|
||||||
|
});
|
||||||
|
|
||||||
|
// See the very large comment in `case wopBuildDerivation:` in
|
||||||
|
// `src/libstore/daemon.cc` that explains the trust model here.
|
||||||
|
//
|
||||||
|
// This condition mirrors that: that code enforces the "rules" outlined there;
|
||||||
|
// we do the best we can given those "rules".
|
||||||
|
if (trustedOrLegacy || drv.type().isCA()) {
|
||||||
|
// Hijack the inputs paths of the derivation to include all
|
||||||
|
// the paths that come from the `inputDrvs` set. We don’t do
|
||||||
|
// that for the derivations whose `inputDrvs` is empty
|
||||||
|
// because:
|
||||||
|
//
|
||||||
|
// 1. It’s not needed
|
||||||
|
//
|
||||||
|
// 2. Changing the `inputSrcs` set changes the associated
|
||||||
|
// output ids, which break CA derivations
|
||||||
|
if (!drv.inputDrvs.empty())
|
||||||
|
drv.inputSrcs = store->parseStorePathSet(inputs);
|
||||||
|
optResult = sshStore->buildDerivation(*drvPath, (const BasicDerivation &) drv);
|
||||||
|
auto & result = *optResult;
|
||||||
|
if (!result.success())
|
||||||
|
throw Error("build of '%s' on '%s' failed: %s", store->printStorePath(*drvPath), storeUri, result.errorMsg);
|
||||||
|
} else {
|
||||||
|
copyClosure(*store, *sshStore, StorePathSet {*drvPath}, NoRepair, NoCheckSigs, substitute);
|
||||||
|
auto res = sshStore->buildPathsWithResults({ DerivedPath::Built { *drvPath, OutputsSpec::All {} } });
|
||||||
|
// One path to build should produce exactly one build result
|
||||||
|
assert(res.size() == 1);
|
||||||
|
optResult = std::move(res[0]);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
auto outputHashes = staticOutputHashes(*store, drv);
|
auto outputHashes = staticOutputHashes(*store, drv);
|
||||||
|
|
||||||
// Hijack the inputs paths of the derivation to include all the paths
|
|
||||||
// that come from the `inputDrvs` set.
|
|
||||||
// We don’t do that for the derivations whose `inputDrvs` is empty
|
|
||||||
// because
|
|
||||||
// 1. It’s not needed
|
|
||||||
// 2. Changing the `inputSrcs` set changes the associated output ids,
|
|
||||||
// which break CA derivations
|
|
||||||
if (!drv.inputDrvs.empty())
|
|
||||||
drv.inputSrcs = store->parseStorePathSet(inputs);
|
|
||||||
|
|
||||||
auto result = sshStore->buildDerivation(*drvPath, drv);
|
|
||||||
|
|
||||||
if (!result.success())
|
|
||||||
throw Error("build of '%s' on '%s' failed: %s", store->printStorePath(*drvPath), storeUri, result.errorMsg);
|
|
||||||
|
|
||||||
std::set<Realisation> missingRealisations;
|
std::set<Realisation> missingRealisations;
|
||||||
StorePathSet missingPaths;
|
StorePathSet missingPaths;
|
||||||
if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations) && !drv.type().hasKnownOutputPaths()) {
|
if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations) && !drv.type().hasKnownOutputPaths()) {
|
||||||
|
@ -311,6 +338,8 @@ connected:
|
||||||
auto thisOutputId = DrvOutput{ thisOutputHash, outputName };
|
auto thisOutputId = DrvOutput{ thisOutputHash, outputName };
|
||||||
if (!store->queryRealisation(thisOutputId)) {
|
if (!store->queryRealisation(thisOutputId)) {
|
||||||
debug("missing output %s", outputName);
|
debug("missing output %s", outputName);
|
||||||
|
assert(optResult);
|
||||||
|
auto & result = *optResult;
|
||||||
auto i = result.builtOutputs.find(outputName);
|
auto i = result.builtOutputs.find(outputName);
|
||||||
assert(i != result.builtOutputs.end());
|
assert(i != result.builtOutputs.end());
|
||||||
auto & newRealisation = i->second;
|
auto & newRealisation = i->second;
|
||||||
|
|
|
@ -46,7 +46,15 @@ std::pair<Value *, PosIdx> InstallableAttrPath::toValue(EvalState & state)
|
||||||
|
|
||||||
DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths()
|
DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths()
|
||||||
{
|
{
|
||||||
auto v = toValue(*state).first;
|
auto [v, pos] = toValue(*state);
|
||||||
|
|
||||||
|
if (std::optional derivedPathWithInfo = trySinglePathToDerivedPaths(
|
||||||
|
*v,
|
||||||
|
pos,
|
||||||
|
fmt("while evaluating the attribute '%s'", attrPath)))
|
||||||
|
{
|
||||||
|
return { *derivedPathWithInfo };
|
||||||
|
}
|
||||||
|
|
||||||
Bindings & autoArgs = *cmd.getAutoArgs(*state);
|
Bindings & autoArgs = *cmd.getAutoArgs(*state);
|
||||||
|
|
||||||
|
|
|
@ -95,31 +95,13 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
|
||||||
// FIXME: use eval cache?
|
// FIXME: use eval cache?
|
||||||
auto v = attr->forceValue();
|
auto v = attr->forceValue();
|
||||||
|
|
||||||
if (v.type() == nPath) {
|
if (std::optional derivedPathWithInfo = trySinglePathToDerivedPaths(
|
||||||
auto storePath = v.path().fetchToStore(state->store);
|
v,
|
||||||
return {{
|
noPos,
|
||||||
.path = DerivedPath::Opaque {
|
fmt("while evaluating the flake output attribute '%s'", attrPath)))
|
||||||
.path = std::move(storePath),
|
{
|
||||||
},
|
return { *derivedPathWithInfo };
|
||||||
.info = make_ref<ExtraPathInfo>(),
|
|
||||||
}};
|
|
||||||
}
|
}
|
||||||
|
|
||||||
else if (v.type() == nString) {
|
|
||||||
NixStringContext context;
|
|
||||||
auto s = state->forceString(v, context, noPos, fmt("while evaluating the flake output attribute '%s'", attrPath));
|
|
||||||
auto storePath = state->store->maybeParseStorePath(s);
|
|
||||||
if (storePath && context.count(NixStringContextElem::Opaque { .path = *storePath })) {
|
|
||||||
return {{
|
|
||||||
.path = DerivedPath::Opaque {
|
|
||||||
.path = std::move(*storePath),
|
|
||||||
},
|
|
||||||
.info = make_ref<ExtraPathInfo>(),
|
|
||||||
}};
|
|
||||||
} else
|
|
||||||
throw Error("flake output attribute '%s' evaluates to the string '%s' which is not a store path", attrPath, s);
|
|
||||||
}
|
|
||||||
|
|
||||||
else
|
else
|
||||||
throw Error("flake output attribute '%s' is not a derivation or path", attrPath);
|
throw Error("flake output attribute '%s' is not a derivation or path", attrPath);
|
||||||
}
|
}
|
||||||
|
@ -234,7 +216,7 @@ FlakeRef InstallableFlake::nixpkgsFlakeRef() const
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return InstallableValue::nixpkgsFlakeRef();
|
return defaultNixpkgsFlakeRef();
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -67,9 +67,22 @@ struct InstallableFlake : InstallableValue
|
||||||
|
|
||||||
std::shared_ptr<flake::LockedFlake> getLockedFlake() const;
|
std::shared_ptr<flake::LockedFlake> getLockedFlake() const;
|
||||||
|
|
||||||
FlakeRef nixpkgsFlakeRef() const override;
|
FlakeRef nixpkgsFlakeRef() const;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Default flake ref for referring to Nixpkgs. For flakes that don't
|
||||||
|
* have their own Nixpkgs input, or other installables.
|
||||||
|
*
|
||||||
|
* It is a layer violation for Nix to know about Nixpkgs; currently just
|
||||||
|
* `nix develop` does. Be wary of using this /
|
||||||
|
* `InstallableFlake::nixpkgsFlakeRef` in more places.
|
||||||
|
*/
|
||||||
|
static inline FlakeRef defaultNixpkgsFlakeRef()
|
||||||
|
{
|
||||||
|
return FlakeRef::fromAttrs({{"type","indirect"}, {"id", "nixpkgs"}});
|
||||||
|
}
|
||||||
|
|
||||||
ref<eval_cache::EvalCache> openEvalCache(
|
ref<eval_cache::EvalCache> openEvalCache(
|
||||||
EvalState & state,
|
EvalState & state,
|
||||||
std::shared_ptr<flake::LockedFlake> lockedFlake);
|
std::shared_ptr<flake::LockedFlake> lockedFlake);
|
||||||
|
|
|
@ -41,4 +41,26 @@ ref<InstallableValue> InstallableValue::require(ref<Installable> installable)
|
||||||
return ref { castedInstallable };
|
return ref { castedInstallable };
|
||||||
}
|
}
|
||||||
|
|
||||||
|
std::optional<DerivedPathWithInfo> InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx)
|
||||||
|
{
|
||||||
|
if (v.type() == nPath) {
|
||||||
|
auto storePath = v.path().fetchToStore(state->store);
|
||||||
|
return {{
|
||||||
|
.path = DerivedPath::Opaque {
|
||||||
|
.path = std::move(storePath),
|
||||||
|
},
|
||||||
|
.info = make_ref<ExtraPathInfo>(),
|
||||||
|
}};
|
||||||
|
}
|
||||||
|
|
||||||
|
else if (v.type() == nString) {
|
||||||
|
return {{
|
||||||
|
.path = state->coerceToDerivedPath(pos, v, errorCtx),
|
||||||
|
.info = make_ref<ExtraPathInfo>(),
|
||||||
|
}};
|
||||||
|
}
|
||||||
|
|
||||||
|
else return std::nullopt;
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -96,13 +96,26 @@ struct InstallableValue : Installable
|
||||||
|
|
||||||
UnresolvedApp toApp(EvalState & state);
|
UnresolvedApp toApp(EvalState & state);
|
||||||
|
|
||||||
virtual FlakeRef nixpkgsFlakeRef() const
|
|
||||||
{
|
|
||||||
return FlakeRef::fromAttrs({{"type","indirect"}, {"id", "nixpkgs"}});
|
|
||||||
}
|
|
||||||
|
|
||||||
static InstallableValue & require(Installable & installable);
|
static InstallableValue & require(Installable & installable);
|
||||||
static ref<InstallableValue> require(ref<Installable> installable);
|
static ref<InstallableValue> require(ref<Installable> installable);
|
||||||
|
|
||||||
|
protected:
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handles either a plain path, or a string with a single string
|
||||||
|
* context elem in the right format. The latter case is handled by
|
||||||
|
* `EvalState::coerceToDerivedPath()`; see it for details.
|
||||||
|
*
|
||||||
|
* @param v Value that is hopefully a string or path per the above.
|
||||||
|
*
|
||||||
|
* @param pos Position of value to aid with diagnostics.
|
||||||
|
*
|
||||||
|
* @param errorCtx Arbitrary message for use in potential error message when something is wrong with `v`.
|
||||||
|
*
|
||||||
|
* @result A derived path (with empty info, for now) if the value
|
||||||
|
* matched the above criteria.
|
||||||
|
*/
|
||||||
|
std::optional<DerivedPathWithInfo> trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx);
|
||||||
};
|
};
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -4,6 +4,7 @@
|
||||||
#include "util.hh"
|
#include "util.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
#include "derivations.hh"
|
#include "derivations.hh"
|
||||||
|
#include "downstream-placeholder.hh"
|
||||||
#include "globals.hh"
|
#include "globals.hh"
|
||||||
#include "eval-inline.hh"
|
#include "eval-inline.hh"
|
||||||
#include "filetransfer.hh"
|
#include "filetransfer.hh"
|
||||||
|
@ -94,7 +95,6 @@ RootValue allocRootValue(Value * v)
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
void Value::print(const SymbolTable & symbols, std::ostream & str,
|
void Value::print(const SymbolTable & symbols, std::ostream & str,
|
||||||
std::set<const void *> * seen) const
|
std::set<const void *> * seen) const
|
||||||
{
|
{
|
||||||
|
@ -1048,6 +1048,27 @@ void EvalState::mkStorePathString(const StorePath & p, Value & v)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void EvalState::mkOutputString(
|
||||||
|
Value & value,
|
||||||
|
const StorePath & drvPath,
|
||||||
|
const std::string outputName,
|
||||||
|
std::optional<StorePath> optOutputPath)
|
||||||
|
{
|
||||||
|
value.mkString(
|
||||||
|
optOutputPath
|
||||||
|
? store->printStorePath(*std::move(optOutputPath))
|
||||||
|
/* Downstream we would substitute this for an actual path once
|
||||||
|
we build the floating CA derivation */
|
||||||
|
: DownstreamPlaceholder::unknownCaOutput(drvPath, outputName).render(),
|
||||||
|
NixStringContext {
|
||||||
|
NixStringContextElem::Built {
|
||||||
|
.drvPath = drvPath,
|
||||||
|
.output = outputName,
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
/* Create a thunk for the delayed computation of the given expression
|
/* Create a thunk for the delayed computation of the given expression
|
||||||
in the given environment. But if the expression is a variable,
|
in the given environment. But if the expression is a variable,
|
||||||
then look it up right away. This significantly reduces the number
|
then look it up right away. This significantly reduces the number
|
||||||
@@ -2298,6 +2319,80 @@ StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, NixStringCon
 }


+std::pair<DerivedPath, std::string_view> EvalState::coerceToDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx)
+{
+    NixStringContext context;
+    auto s = forceString(v, context, pos, errorCtx);
+    auto csize = context.size();
+    if (csize != 1)
+        error(
+            "string '%s' has %d entries in its context. It should only have exactly one entry",
+            s, csize)
+            .withTrace(pos, errorCtx).debugThrow<EvalError>();
+    auto derivedPath = std::visit(overloaded {
+        [&](NixStringContextElem::Opaque && o) -> DerivedPath {
+            return DerivedPath::Opaque {
+                .path = std::move(o.path),
+            };
+        },
+        [&](NixStringContextElem::DrvDeep &&) -> DerivedPath {
+            error(
+                "string '%s' has a context which refers to a complete source and binary closure. This is not supported at this time",
+                s).withTrace(pos, errorCtx).debugThrow<EvalError>();
+        },
+        [&](NixStringContextElem::Built && b) -> DerivedPath {
+            return DerivedPath::Built {
+                .drvPath = std::move(b.drvPath),
+                .outputs = OutputsSpec::Names { std::move(b.output) },
+            };
+        },
+    }, ((NixStringContextElem &&) *context.begin()).raw());
+    return {
+        std::move(derivedPath),
+        std::move(s),
+    };
+}
+
+
+DerivedPath EvalState::coerceToDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx)
+{
+    auto [derivedPath, s_] = coerceToDerivedPathUnchecked(pos, v, errorCtx);
+    auto s = s_;
+    std::visit(overloaded {
+        [&](const DerivedPath::Opaque & o) {
+            auto sExpected = store->printStorePath(o.path);
+            if (s != sExpected)
+                error(
+                    "path string '%s' has context with the different path '%s'",
+                    s, sExpected)
+                    .withTrace(pos, errorCtx).debugThrow<EvalError>();
+        },
+        [&](const DerivedPath::Built & b) {
+            // TODO need derived path with single output to make this
+            // total. Will add as part of RFC 92 work and then this is
+            // cleaned up.
+            auto output = *std::get<OutputsSpec::Names>(b.outputs).begin();
+
+            auto drv = store->readDerivation(b.drvPath);
+            auto i = drv.outputs.find(output);
+            if (i == drv.outputs.end())
+                throw Error("derivation '%s' does not have output '%s'", store->printStorePath(b.drvPath), output);
+            auto optOutputPath = i->second.path(*store, drv.name, output);
+            // This is testing for the case of CA derivations
+            auto sExpected = optOutputPath
+                ? store->printStorePath(*optOutputPath)
+                : DownstreamPlaceholder::unknownCaOutput(b.drvPath, output).render();
+            if (s != sExpected)
+                error(
+                    "string '%s' has context with the output '%s' from derivation '%s', but the string is not the right placeholder for this derivation output. It should be '%s'",
+                    s, output, store->printStorePath(b.drvPath), sExpected)
+                    .withTrace(pos, errorCtx).debugThrow<EvalError>();
+        }
+    }, derivedPath.raw());
+    return derivedPath;
+}
+
+
 bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_view errorCtx)
 {
     forceValue(v1, noPos);
@@ -21,6 +21,7 @@ namespace nix {
 class Store;
 class EvalState;
 class StorePath;
+struct DerivedPath;
 enum RepairFlag : bool;


@@ -473,6 +474,28 @@ public:
     */
    StorePath coerceToStorePath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx);
+
+    /**
+     * Part of `coerceToDerivedPath()` without any store IO which is exposed for unit testing only.
+     */
+    std::pair<DerivedPath, std::string_view> coerceToDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx);
+
+    /**
+     * Coerce to `DerivedPath`.
+     *
+     * Must be a string which is either a literal store path or a
+     * "placeholder (see `DownstreamPlaceholder`).
+     *
+     * Even more importantly, the string context must be exactly one
+     * element, which is either a `NixStringContextElem::Opaque` or
+     * `NixStringContextElem::Built`. (`NixStringContextEleme::DrvDeep`
+     * is not permitted).
+     *
+     * The string is parsed based on the context --- the context is the
+     * source of truth, and ultimately tells us what we want, and then
+     * we ensure the string corresponds to it.
+     */
+    DerivedPath coerceToDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx);
+
 public:

     /**
@@ -576,12 +599,37 @@ public:
     void mkThunk_(Value & v, Expr * expr);
     void mkPos(Value & v, PosIdx pos);

-    /* Create a string representing a store path.
-       The string is the printed store path with a context containing a single
-       `Opaque` element of that store path. */
+    /**
+     * Create a string representing a store path.
+     *
+     * The string is the printed store path with a context containing a single
+     * `NixStringContextElem::Opaque` element of that store path.
+     */
     void mkStorePathString(const StorePath & storePath, Value & v);

+    /**
+     * Create a string representing a `DerivedPath::Built`.
+     *
+     * The string is the printed store path with a context containing a single
+     * `NixStringContextElem::Built` element of the drv path and output name.
+     *
+     * @param value Value we are settings
+     *
+     * @param drvPath Path the drv whose output we are making a string for
+     *
+     * @param outputName Name of the output
+     *
+     * @param optOutputPath Optional output path for that string. Must
+     * be passed if and only if output store object is input-addressed.
+     * Will be printed to form string if passed, otherwise a placeholder
+     * will be used (see `DownstreamPlaceholder`).
+     */
+    void mkOutputString(
+        Value & value,
+        const StorePath & drvPath,
+        const std::string outputName,
+        std::optional<StorePath> optOutputPath);
+
     void concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos, std::string_view errorCtx);

     /**
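The two helpers declared above are meant to be inverses: `mkOutputString` renders a derivation output as a path-or-placeholder string carrying a `Built` context element, and `coerceToDerivedPathUnchecked` recovers the `DerivedPath` from that context. A minimal sketch of the intended round trip (illustrative only, not part of this commit; it assumes an initialised `EvalState` and a `.drv` store path are already in scope):

```cpp
#include "eval.hh"
#include "derived-path.hh"

// Sketch: round-trip a derivation output through the new string helpers.
void outputStringRoundTrip(nix::EvalState & state, const nix::StorePath & drvPath)
{
    using namespace nix;

    auto & v = *state.allocValue();
    // No output path is known yet, so the string becomes a DownstreamPlaceholder.
    state.mkOutputString(v, drvPath, "out", std::nullopt);

    // The single Built context element is the source of truth when parsing back.
    auto [derived, text] = state.coerceToDerivedPathUnchecked(noPos, v, "");
    // `derived` now holds DerivedPath::Built { .drvPath = drvPath, .outputs = {"out"} }.
}
```

The property tests added later in this diff (`src/libexpr/tests/derived-path.cc`) exercise exactly this round trip with rapidcheck-generated store paths.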
@@ -1,5 +1,6 @@
 #include "archive.hh"
 #include "derivations.hh"
+#include "downstream-placeholder.hh"
 #include "eval-inline.hh"
 #include "eval.hh"
 #include "globals.hh"
@@ -87,7 +88,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
             auto outputs = resolveDerivedPath(*store, drv);
             for (auto & [outputName, outputPath] : outputs) {
                 res.insert_or_assign(
-                    downstreamPlaceholder(*store, drv.drvPath, outputName),
+                    DownstreamPlaceholder::unknownCaOutput(drv.drvPath, outputName).render(),
                     store->printStorePath(outputPath)
                 );
             }
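The free function `downstreamPlaceholder()` is replaced here by the `DownstreamPlaceholder` class. The idea is unchanged: until a floating content-addressed output's real path is known, a deterministic stand-in string is used and later rewritten to the real store path. A rough sketch of how such a placeholder is produced (illustrative, assuming a `.drv` store path in scope):

```cpp
#include "downstream-placeholder.hh"

// Sketch: compute the stand-in string for the "out" output of a CA derivation.
std::string placeholderFor(const nix::StorePath & drvPath)
{
    // Renders as "/" followed by a base-32 hash derived from (drvPath, "out"),
    // so it is stable across evaluations and safe to splice into build inputs.
    return nix::DownstreamPlaceholder::unknownCaOutput(drvPath, "out").render();
}
```

In `realiseContext` above, that string is mapped to the real output path once `resolveDerivedPath` has produced it.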
@@ -129,40 +130,31 @@ static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, co
         }
     }
 }

-/* Add and attribute to the given attribute map from the output name to
-   the output path, or a placeholder.
-   Where possible the path is used, but for floating CA derivations we
-   may not know it. For sake of determinism we always assume we don't
-   and instead put in a place holder. In either case, however, the
-   string context will contain the drv path and output name, so
-   downstream derivations will have the proper dependency, and in
-   addition, before building, the placeholder will be rewritten to be
-   the actual path.
-   The 'drv' and 'drvPath' outputs must correspond. */
+/**
+ * Add and attribute to the given attribute map from the output name to
+ * the output path, or a placeholder.
+ *
+ * Where possible the path is used, but for floating CA derivations we
+ * may not know it. For sake of determinism we always assume we don't
+ * and instead put in a place holder. In either case, however, the
+ * string context will contain the drv path and output name, so
+ * downstream derivations will have the proper dependency, and in
+ * addition, before building, the placeholder will be rewritten to be
+ * the actual path.
+ *
+ * The 'drv' and 'drvPath' outputs must correspond.
+ */
 static void mkOutputString(
     EvalState & state,
     BindingsBuilder & attrs,
     const StorePath & drvPath,
-    const BasicDerivation & drv,
     const std::pair<std::string, DerivationOutput> & o)
 {
-    auto optOutputPath = o.second.path(*state.store, drv.name, o.first);
-    attrs.alloc(o.first).mkString(
-        optOutputPath
-            ? state.store->printStorePath(*optOutputPath)
-            /* Downstream we would substitute this for an actual path once
-               we build the floating CA derivation */
-            /* FIXME: we need to depend on the basic derivation, not
-               derivation */
-            : downstreamPlaceholder(*state.store, drvPath, o.first),
-        NixStringContext {
-            NixStringContextElem::Built {
-                .drvPath = drvPath,
-                .output = o.first,
-            }
-        });
+    state.mkOutputString(
+        attrs.alloc(o.first),
+        drvPath,
+        o.first,
+        o.second.path(*state.store, Derivation::nameFromPath(drvPath), o.first));
 }

 /* Load and evaluate an expression from path specified by the
@@ -193,7 +185,7 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
             state.mkList(outputsVal, drv.outputs.size());

             for (const auto & [i, o] : enumerate(drv.outputs)) {
-                mkOutputString(state, attrs, *storePath, drv, o);
+                mkOutputString(state, attrs, *storePath, o);
                 (outputsVal.listElems()[i] = state.allocValue())->mkString(o.first);
             }

@@ -706,12 +698,14 @@ static RegisterPrimOp primop_genericClosure(RegisterPrimOp::Info {
     .arity = 1,
     .doc = R"(
       Take an *attrset* with values named `startSet` and `operator` in order to
-      return a *list of attrsets* by starting with the `startSet`, recursively
-      applying the `operator` function to each element. The *attrsets* in the
-      `startSet` and produced by the `operator` must each contain value named
-      `key` which are comparable to each other. The result is produced by
-      repeatedly calling the operator for each element encountered with a
-      unique key, terminating when no new elements are produced. For example,
+      return a *list of attrsets* by starting with the `startSet` and recursively
+      applying the `operator` function to each `item`. The *attrsets* in the
+      `startSet` and the *attrsets* produced by `operator` must contain a value
+      named `key` which is comparable. The result is produced by calling `operator`
+      for each `item` with a value for `key` that has not been called yet including
+      newly produced `item`s. The function terminates when no new `item`s are
+      produced. The resulting *list of attrsets* contains only *attrsets* with a
+      unique key. For example,

       ```
       builtins.genericClosure {
@@ -1098,7 +1092,7 @@ drvName, Bindings * attrs, Value & v)
     bool isImpure = false;
     std::optional<std::string> outputHash;
     std::string outputHashAlgo;
-    std::optional<FileIngestionMethod> ingestionMethod;
+    std::optional<ContentAddressMethod> ingestionMethod;

     StringSet outputs;
     outputs.insert("out");
@@ -1111,7 +1105,10 @@ drvName, Bindings * attrs, Value & v)
     auto handleHashMode = [&](const std::string_view s) {
         if (s == "recursive") ingestionMethod = FileIngestionMethod::Recursive;
         else if (s == "flat") ingestionMethod = FileIngestionMethod::Flat;
-        else
+        else if (s == "text") {
+            experimentalFeatureSettings.require(Xp::DynamicDerivations);
+            ingestionMethod = TextIngestionMethod {};
+        } else
             state.debugThrowLastTrace(EvalError({
                 .msg = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
                 .errPos = state.positions[noPos]
@@ -1278,11 +1275,16 @@ drvName, Bindings * attrs, Value & v)
         }));

     /* Check whether the derivation name is valid. */
-    if (isDerivation(drvName))
+    if (isDerivation(drvName) &&
+        !(ingestionMethod == ContentAddressMethod { TextIngestionMethod { } } &&
+          outputs.size() == 1 &&
+          *(outputs.begin()) == "out"))
+    {
         state.debugThrowLastTrace(EvalError({
-            .msg = hintfmt("derivation names are not allowed to end in '%s'", drvExtension),
+            .msg = hintfmt("derivation names are allowed to end in '%s' only if they produce a single derivation file", drvExtension),
            .errPos = state.positions[noPos]
        }));
+    }

    if (outputHash) {
        /* Handle fixed-output derivations.
@@ -1298,21 +1300,15 @@ drvName, Bindings * attrs, Value & v)
         auto h = newHashAllowEmpty(*outputHash, parseHashTypeOpt(outputHashAlgo));

         auto method = ingestionMethod.value_or(FileIngestionMethod::Flat);
-        auto outPath = state.store->makeFixedOutputPath(drvName, FixedOutputInfo {
-            .hash = {
-                .method = method,
-                .hash = h,
-            },
-            .references = {},
-        });
-        drv.env["out"] = state.store->printStorePath(outPath);
-        drv.outputs.insert_or_assign("out",
-            DerivationOutput::CAFixed {
-                .hash = FixedOutputHash {
-                    .method = method,
-                    .hash = std::move(h),
-                },
-            });
+
+        DerivationOutput::CAFixed dof {
+            .ca = ContentAddress::fromParts(
+                std::move(method),
+                std::move(h)),
+        };
+
+        drv.env["out"] = state.store->printStorePath(dof.path(*state.store, drvName, "out"));
+        drv.outputs.insert_or_assign("out", std::move(dof));
     }

     else if (contentAddressed || isImpure) {
@@ -1330,13 +1326,13 @@ drvName, Bindings * attrs, Value & v)
             if (isImpure)
                 drv.outputs.insert_or_assign(i,
                     DerivationOutput::Impure {
-                        .method = method,
+                        .method = method.raw,
                         .hashType = ht,
                     });
             else
                 drv.outputs.insert_or_assign(i,
                     DerivationOutput::CAFloating {
-                        .method = method,
+                        .method = method.raw,
                         .hashType = ht,
                     });
         }
@@ -1401,7 +1397,7 @@ drvName, Bindings * attrs, Value & v)
         NixStringContextElem::DrvDeep { .drvPath = drvPath },
     });
     for (auto & i : drv.outputs)
-        mkOutputString(state, result, drvPath, drv, i);
+        mkOutputString(state, result, drvPath, i);

     v.mkAttrs(result);
 }
@@ -1,4 +1,5 @@
 #include "print.hh"
+#include <unordered_set>

 namespace nix {

@@ -25,11 +26,26 @@ printLiteralBool(std::ostream & str, bool boolean)
     return str;
 }

+// Returns `true' is a string is a reserved keyword which requires quotation
+// when printing attribute set field names.
+//
+// This list should generally be kept in sync with `./lexer.l'.
+// You can test if a keyword needs to be added by running:
+// $ nix eval --expr '{ <KEYWORD> = 1; }'
+// For example `or' doesn't need to be quoted.
+bool isReservedKeyword(const std::string_view str)
+{
+    static const std::unordered_set<std::string_view> reservedKeywords = {
+        "if", "then", "else", "assert", "with", "let", "in", "rec", "inherit"
+    };
+    return reservedKeywords.contains(str);
+}
+
 std::ostream &
 printIdentifier(std::ostream & str, std::string_view s) {
     if (s.empty())
         str << "\"\"";
-    else if (s == "if") // FIXME: handle other keywords
+    else if (isReservedKeyword(s))
         str << '"' << s << '"';
     else {
         char c = s[0];
@@ -50,10 +66,10 @@ printIdentifier(std::ostream & str, std::string_view s) {
     return str;
 }

-// FIXME: keywords
 static bool isVarName(std::string_view s)
 {
     if (s.size() == 0) return false;
+    if (isReservedKeyword(s)) return false;
     char c = s[0];
     if ((c >= '0' && c <= '9') || c == '-' || c == '\'') return false;
     for (auto & i : s)
@@ -35,6 +35,12 @@ namespace nix {
  */
 std::ostream & printAttributeName(std::ostream & o, std::string_view s);

+/**
+ * Returns `true' is a string is a reserved keyword which requires quotation
+ * when printing attribute set field names.
+ */
+bool isReservedKeyword(const std::string_view str);
+
 /**
  * Print a string as an identifier in the Nix expression language syntax.
  *
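The quoting rule that `isReservedKeyword` implements can be tried out in isolation. The following standalone sketch mirrors the list added to `print.cc`; it is illustrative and not the actual Nix source:

```cpp
#include <iostream>
#include <string_view>
#include <unordered_set>

// Mirror of the reserved-keyword check: these names must be quoted when they
// appear as attribute names, e.g. { "let" = 1; } rather than { let = 1; }.
// (unordered_set::contains requires C++20.)
static bool isReservedKeyword(std::string_view s)
{
    static const std::unordered_set<std::string_view> reservedKeywords = {
        "if", "then", "else", "assert", "with", "let", "in", "rec", "inherit"
    };
    return reservedKeywords.contains(s);
}

int main()
{
    std::cout << isReservedKeyword("let") << '\n';  // 1: needs quoting
    std::cout << isReservedKeyword("or") << '\n';   // 0: `or` is not reserved
}
```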
src/libexpr/tests/derived-path.cc (new file, 65 lines)
@@ -0,0 +1,65 @@
+#include <nlohmann/json.hpp>
+#include <gtest/gtest.h>
+#include <rapidcheck/gtest.h>
+
+#include "tests/derived-path.hh"
+#include "tests/libexpr.hh"
+
+namespace nix {
+
+// Testing of trivial expressions
+class DerivedPathExpressionTest : public LibExprTest {};
+
+// FIXME: `RC_GTEST_FIXTURE_PROP` isn't calling `SetUpTestSuite` because it is
+// no a real fixture.
+//
+// See https://github.com/emil-e/rapidcheck/blob/master/doc/gtest.md#rc_gtest_fixture_propfixture-name-args
+TEST_F(DerivedPathExpressionTest, force_init)
+{
+}
+
+RC_GTEST_FIXTURE_PROP(
+    DerivedPathExpressionTest,
+    prop_opaque_path_round_trip,
+    (const DerivedPath::Opaque & o))
+{
+    auto * v = state.allocValue();
+    state.mkStorePathString(o.path, *v);
+    auto d = state.coerceToDerivedPath(noPos, *v, "");
+    RC_ASSERT(DerivedPath { o } == d);
+}
+
+// TODO use DerivedPath::Built for parameter once it supports a single output
+// path only.
+
+RC_GTEST_FIXTURE_PROP(
+    DerivedPathExpressionTest,
+    prop_built_path_placeholder_round_trip,
+    (const StorePath & drvPath, const StorePathName & outputName))
+{
+    auto * v = state.allocValue();
+    state.mkOutputString(*v, drvPath, outputName.name, std::nullopt);
+    auto [d, _] = state.coerceToDerivedPathUnchecked(noPos, *v, "");
+    DerivedPath::Built b {
+        .drvPath = drvPath,
+        .outputs = OutputsSpec::Names { outputName.name },
+    };
+    RC_ASSERT(DerivedPath { b } == d);
+}
+
+RC_GTEST_FIXTURE_PROP(
+    DerivedPathExpressionTest,
+    prop_built_path_out_path_round_trip,
+    (const StorePath & drvPath, const StorePathName & outputName, const StorePath & outPath))
+{
+    auto * v = state.allocValue();
+    state.mkOutputString(*v, drvPath, outputName.name, outPath);
+    auto [d, _] = state.coerceToDerivedPathUnchecked(noPos, *v, "");
+    DerivedPath::Built b {
+        .drvPath = drvPath,
+        .outputs = OutputsSpec::Names { outputName.name },
+    };
+    RC_ASSERT(DerivedPath { b } == d);
+}
+
+} /* namespace nix */
@@ -95,13 +95,15 @@ Gen<NixStringContextElem::Built> Arbitrary<NixStringContextElem::Built>::arbitra

 Gen<NixStringContextElem> Arbitrary<NixStringContextElem>::arbitrary()
 {
-    switch (*gen::inRange<uint8_t>(0, 2)) {
+    switch (*gen::inRange<uint8_t>(0, std::variant_size_v<NixStringContextElem::Raw>)) {
     case 0:
         return gen::just<NixStringContextElem>(*gen::arbitrary<NixStringContextElem::Opaque>());
     case 1:
         return gen::just<NixStringContextElem>(*gen::arbitrary<NixStringContextElem::DrvDeep>());
-    default:
+    case 2:
         return gen::just<NixStringContextElem>(*gen::arbitrary<NixStringContextElem::Built>());
+    default:
+        assert(false);
     }
 }
@@ -62,6 +62,7 @@ std::optional<std::string> readHead(const Path & path)
         .program = "git",
         // FIXME: use 'HEAD' to avoid returning all refs
         .args = {"ls-remote", "--symref", path},
+        .isInteractive = true,
     });
     if (status != 0) return std::nullopt;

@@ -351,7 +352,7 @@ struct GitInputScheme : InputScheme

         args.push_back(destDir);

-        runProgram("git", true, args);
+        runProgram("git", true, args, {}, true);
     }

     std::optional<Path> getSourcePath(const Input & input) override
@@ -556,7 +557,7 @@ struct GitInputScheme : InputScheme
                         : ref == "HEAD"
                             ? *ref
                             : "refs/heads/" + *ref;
-                    runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) });
+                    runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) }, {}, true);
                 } catch (Error & e) {
                     if (!pathExists(localRefFile)) throw;
                     warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl);
@@ -623,7 +624,7 @@ struct GitInputScheme : InputScheme
                 // everything to ensure we get the rev.
                 Activity act(*logger, lvlTalkative, actUnknown, fmt("making temporary clone of '%s'", repoDir));
                 runProgram("git", true, { "-C", tmpDir, "fetch", "--quiet", "--force",
-                                          "--update-head-ok", "--", repoDir, "refs/*:refs/*" });
+                                          "--update-head-ok", "--", repoDir, "refs/*:refs/*" }, {}, true);
             }

             runProgram("git", true, { "-C", tmpDir, "checkout", "--quiet", input.getRev()->gitRev() });
@@ -650,7 +651,7 @@ struct GitInputScheme : InputScheme

             {
                 Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching submodules of '%s'", actualUrl));
-                runProgram("git", true, { "-C", tmpDir, "submodule", "--quiet", "update", "--init", "--recursive" });
+                runProgram("git", true, { "-C", tmpDir, "submodule", "--quiet", "update", "--init", "--recursive" }, {}, true);
             }

             filter = isNotDotGitDirectory;
@@ -21,7 +21,7 @@ struct DownloadUrl
 };

 // A github, gitlab, or sourcehut host
-const static std::string hostRegexS = "[a-zA-Z0-9.]*"; // FIXME: check
+const static std::string hostRegexS = "[a-zA-Z0-9.-]*"; // FIXME: check
 std::regex hostRegex(hostRegexS, std::regex::ECMAScript);

 struct GitArchiveInputScheme : InputScheme
@@ -274,11 +274,13 @@ void DerivationGoal::haveDerivation()
                 )
             )
         );
-    else
+    else {
+        auto * cap = getDerivationCA(*drv);
         addWaitee(upcast_goal(worker.makePathSubstitutionGoal(
             status.known->path,
             buildMode == bmRepair ? Repair : NoRepair,
-            getDerivationCA(*drv))));
+            cap ? std::optional { *cap } : std::nullopt)));
+    }
     }

     if (waitees.empty()) /* to prevent hang (no wake-up event) */
@@ -1020,43 +1022,33 @@ void DerivationGoal::resolvedFinished()

     StorePathSet outputPaths;

-    // `wantedOutputs` might merely indicate “all the outputs”
-    auto realWantedOutputs = std::visit(overloaded {
-        [&](const OutputsSpec::All &) {
-            return resolvedDrv.outputNames();
-        },
-        [&](const OutputsSpec::Names & names) {
-            return static_cast<std::set<std::string>>(names);
-        },
-    }, wantedOutputs.raw());
-
-    for (auto & wantedOutput : realWantedOutputs) {
-        auto initialOutput = get(initialOutputs, wantedOutput);
-        auto resolvedHash = get(resolvedHashes, wantedOutput);
+    for (auto & outputName : resolvedDrv.outputNames()) {
+        auto initialOutput = get(initialOutputs, outputName);
+        auto resolvedHash = get(resolvedHashes, outputName);
         if ((!initialOutput) || (!resolvedHash))
             throw Error(
                 "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,resolve)",
-                worker.store.printStorePath(drvPath), wantedOutput);
+                worker.store.printStorePath(drvPath), outputName);

         auto realisation = [&]{
-            auto take1 = get(resolvedResult.builtOutputs, wantedOutput);
+            auto take1 = get(resolvedResult.builtOutputs, outputName);
             if (take1) return *take1;

             /* The above `get` should work. But sateful tracking of
                outputs in resolvedResult, this can get out of sync with the
                store, which is our actual source of truth. For now we just
                check the store directly if it fails. */
-            auto take2 = worker.evalStore.queryRealisation(DrvOutput { *resolvedHash, wantedOutput });
+            auto take2 = worker.evalStore.queryRealisation(DrvOutput { *resolvedHash, outputName });
             if (take2) return *take2;

             throw Error(
                 "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,realisation)",
-                worker.store.printStorePath(resolvedDrvGoal->drvPath), wantedOutput);
+                worker.store.printStorePath(resolvedDrvGoal->drvPath), outputName);
         }();

         if (drv->type().isPure()) {
             auto newRealisation = realisation;
-            newRealisation.id = DrvOutput { initialOutput->outputHash, wantedOutput };
+            newRealisation.id = DrvOutput { initialOutput->outputHash, outputName };
             newRealisation.signatures.clear();
             if (!drv->type().isFixed())
                 newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath);
@@ -1064,7 +1056,7 @@ void DerivationGoal::resolvedFinished()
             worker.store.registerDrvOutput(newRealisation);
         }
         outputPaths.insert(realisation.outPath);
-        builtOutputs.emplace(wantedOutput, realisation);
+        builtOutputs.emplace(outputName, realisation);
     }

     runPostBuildHook(
@@ -1406,7 +1398,7 @@ std::pair<bool, SingleDrvOutputs> DerivationGoal::checkPathValidity()
                 );
             }
         }
-        if (info.wanted && info.known && info.known->isValid())
+        if (info.known && info.known->isValid())
             validOutputs.emplace(i.first, Realisation { drvOutput, info.known->path });
     }

@@ -1457,8 +1449,9 @@ void DerivationGoal::done(
     mcRunningBuilds.reset();

     if (buildResult.success()) {
-        assert(!builtOutputs.empty());
-        buildResult.builtOutputs = std::move(builtOutputs);
+        auto wantedBuiltOutputs = filterDrvOutputs(wantedOutputs, std::move(builtOutputs));
+        assert(!wantedBuiltOutputs.empty());
+        buildResult.builtOutputs = std::move(wantedBuiltOutputs);
         if (status == BuildResult::Built)
             worker.doneBuilds++;
     } else {
@@ -306,15 +306,13 @@ struct DerivationGoal : public Goal
     * Update 'initialOutputs' to determine the current status of the
     * outputs of the derivation. Also returns a Boolean denoting
     * whether all outputs are valid and non-corrupt, and a
-     * 'SingleDrvOutputs' structure containing the valid and wanted
-     * outputs.
+     * 'SingleDrvOutputs' structure containing the valid outputs.
     */
    std::pair<bool, SingleDrvOutputs> checkPathValidity();

    /**
     * Aborts if any output is not valid or corrupt, and otherwise
-     * returns a 'SingleDrvOutputs' structure containing the wanted
-     * outputs.
+     * returns a 'SingleDrvOutputs' structure containing all outputs.
     */
    SingleDrvOutputs assertPathValidity();

@@ -335,6 +333,8 @@ struct DerivationGoal : public Goal
    void waiteeDone(GoalPtr waitee, ExitCode result) override;

    StorePathSet exportReferences(const StorePathSet & storePaths);
+
+    JobCategory jobCategory() override { return JobCategory::Build; };
 };

 MakeError(NotDeterministic, BuildError);
@@ -21,7 +21,7 @@ class Worker;
 class DrvOutputSubstitutionGoal : public Goal {

     /**
-     * The drv output we're trying to substitue
+     * The drv output we're trying to substitute
     */
    DrvOutput id;

@@ -72,6 +72,8 @@ public:

    void work() override;
    void handleEOF(int fd) override;
+
+    JobCategory jobCategory() override { return JobCategory::Substitution; };
 };

 }
@@ -110,7 +110,7 @@ void Store::ensurePath(const StorePath & path)
 }


-void LocalStore::repairPath(const StorePath & path)
+void Store::repairPath(const StorePath & path)
 {
     Worker worker(*this, *this);
     GoalPtr goal = worker.makePathSubstitutionGoal(path, Repair);
@@ -34,6 +34,17 @@ typedef std::set<WeakGoalPtr, std::owner_less<WeakGoalPtr>> WeakGoals;
  */
 typedef std::map<StorePath, WeakGoalPtr> WeakGoalMap;

+/**
+ * Used as a hint to the worker on how to schedule a particular goal. For example,
+ * builds are typically CPU- and memory-bound, while substitutions are I/O bound.
+ * Using this information, the worker might decide to schedule more or fewer goals
+ * of each category in parallel.
+ */
+enum struct JobCategory {
+    Build,
+    Substitution,
+};
+
 struct Goal : public std::enable_shared_from_this<Goal>
 {
     typedef enum {ecBusy, ecSuccess, ecFailed, ecNoSubstituters, ecIncompleteClosure} ExitCode;
@@ -150,6 +161,8 @@ public:
     void amDone(ExitCode result, std::optional<Error> ex = {});

     virtual void cleanup() { }
+
+    virtual JobCategory jobCategory() = 0;
 };

 void addToWeakGoals(WeakGoals & goals, GoalPtr p);
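The new `JobCategory` hint separates the two kinds of slots the worker tracks. A condensed sketch of the mechanism (not the actual sources; the real logic lives in `Worker::childStarted`, `childTerminated` and `waitForBuildSlot` further down in this diff):

```cpp
// Sketch: goals advertise their category; the scheduler counts each kind
// against its own limit (maxBuildJobs vs. the new maxSubstitutionJobs).
enum struct JobCategory { Build, Substitution };

struct Goal {
    virtual ~Goal() = default;
    virtual JobCategory jobCategory() = 0;
};

struct DerivationGoal : Goal {
    JobCategory jobCategory() override { return JobCategory::Build; }
};

struct PathSubstitutionGoal : Goal {
    JobCategory jobCategory() override { return JobCategory::Substitution; }
};

bool countsAgainstBuildJobs(Goal & g)
{
    // Substitutions are I/O-bound and no longer occupy local build slots.
    return g.jobCategory() == JobCategory::Build;
}
```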
@@ -1776,6 +1776,8 @@ void LocalDerivationGoal::runChild()
                 for (auto & path : { "/etc/resolv.conf", "/etc/services", "/etc/hosts" })
                     if (pathExists(path))
                         ss.push_back(path);
+
+                dirsInChroot.emplace(settings.caFile, "/etc/ssl/certs/ca-certificates.crt");
             }

             for (auto & i : ss) dirsInChroot.emplace(i, i);
@@ -2426,37 +2428,51 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                 throw BuildError(
                     "output path %1% without valid stats info",
                     actualPath);
-            if (outputHash.method == FileIngestionMethod::Flat) {
+            if (outputHash.method == ContentAddressMethod { FileIngestionMethod::Flat } ||
+                outputHash.method == ContentAddressMethod { TextIngestionMethod {} })
+            {
                 /* The output path should be a regular file without execute permission. */
                 if (!S_ISREG(st->st_mode) || (st->st_mode & S_IXUSR) != 0)
                     throw BuildError(
                         "output path '%1%' should be a non-executable regular file "
-                        "since recursive hashing is not enabled (outputHashMode=flat)",
+                        "since recursive hashing is not enabled (one of outputHashMode={flat,text} is true)",
                         actualPath);
             }
             rewriteOutput();
             /* FIXME optimize and deduplicate with addToStore */
             std::string oldHashPart { scratchPath->hashPart() };
             HashModuloSink caSink { outputHash.hashType, oldHashPart };
-            switch (outputHash.method) {
-            case FileIngestionMethod::Recursive:
-                dumpPath(actualPath, caSink);
-                break;
-            case FileIngestionMethod::Flat:
-                readFile(actualPath, caSink);
-                break;
-            }
+            std::visit(overloaded {
+                [&](const TextIngestionMethod &) {
+                    readFile(actualPath, caSink);
+                },
+                [&](const FileIngestionMethod & m2) {
+                    switch (m2) {
+                    case FileIngestionMethod::Recursive:
+                        dumpPath(actualPath, caSink);
+                        break;
+                    case FileIngestionMethod::Flat:
+                        readFile(actualPath, caSink);
+                        break;
+                    }
+                },
+            }, outputHash.method.raw);
             auto got = caSink.finish().first;
+
+            auto optCA = ContentAddressWithReferences::fromPartsOpt(
+                outputHash.method,
+                std::move(got),
+                rewriteRefs());
+            if (!optCA) {
+                // TODO track distinct failure modes separately (at the time of
+                // writing there is just one but `nullopt` is unclear) so this
+                // message can't get out of sync.
+                throw BuildError("output path '%s' has illegal content address, probably a spurious self-reference with text hashing");
+            }
             ValidPathInfo newInfo0 {
                 worker.store,
                 outputPathName(drv->name, outputName),
-                FixedOutputInfo {
-                    .hash = {
-                        .method = outputHash.method,
-                        .hash = got,
-                    },
-                    .references = rewriteRefs(),
-                },
+                *std::move(optCA),
                 Hash::dummy,
             };
             if (*scratchPath != newInfo0.path) {
@@ -2503,13 +2519,14 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
             },

             [&](const DerivationOutput::CAFixed & dof) {
+                auto wanted = dof.ca.getHash();
+
                 auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating {
-                    .method = dof.hash.method,
-                    .hashType = dof.hash.hash.type,
+                    .method = dof.ca.getMethod(),
+                    .hashType = wanted.type,
                 });

                 /* Check wanted hash */
-                const Hash & wanted = dof.hash.hash;
                 assert(newInfo0.ca);
                 auto got = newInfo0.ca->getHash();
                 if (wanted != got) {
@@ -2522,6 +2539,11 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                         wanted.to_string(SRI, true),
                         got.to_string(SRI, true)));
                 }
+                if (!newInfo0.references.empty())
+                    delayedException = std::make_exception_ptr(
+                        BuildError("illegal path references in fixed-output derivation '%s'",
+                            worker.store.printStorePath(drvPath)));
+
                 return newInfo0;
             },

@@ -2701,8 +2723,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
             signRealisation(thisRealisation);
             worker.store.registerDrvOutput(thisRealisation);
         }
-        if (wantedOutputs.contains(outputName))
-            builtOutputs.emplace(outputName, thisRealisation);
+        builtOutputs.emplace(outputName, thisRealisation);
     }

     return builtOutputs;
@@ -200,11 +200,10 @@ void PathSubstitutionGoal::tryToRun()
 {
     trace("trying to run");

-    /* Make sure that we are allowed to start a build. Note that even
-       if maxBuildJobs == 0 (no local builds allowed), we still allow
-       a substituter to run. This is because substitutions cannot be
-       distributed to another machine via the build hook. */
-    if (worker.getNrLocalBuilds() >= std::max(1U, (unsigned int) settings.maxBuildJobs)) {
+    /* Make sure that we are allowed to start a substitution. Note that even
+       if maxSubstitutionJobs == 0, we still allow a substituter to run. This
+       prevents infinite waiting. */
+    if (worker.getNrSubstitutions() >= std::max(1U, (unsigned int) settings.maxSubstitutionJobs)) {
         worker.waitForBuildSlot(shared_from_this());
         return;
     }
@@ -115,6 +115,8 @@ public:
     void handleEOF(int fd) override;

     void cleanup() override;
+
+    JobCategory jobCategory() override { return JobCategory::Substitution; };
 };

 }
@@ -18,6 +18,7 @@ Worker::Worker(Store & store, Store & evalStore)
 {
     /* Debugging: prevent recursive workers. */
     nrLocalBuilds = 0;
+    nrSubstitutions = 0;
     lastWokenUp = steady_time_point::min();
     permanentFailure = false;
     timedOut = false;
@@ -176,6 +177,12 @@ unsigned Worker::getNrLocalBuilds()
 }


+unsigned Worker::getNrSubstitutions()
+{
+    return nrSubstitutions;
+}
+
+
 void Worker::childStarted(GoalPtr goal, const std::set<int> & fds,
     bool inBuildSlot, bool respectTimeouts)
 {
@@ -187,7 +194,10 @@ void Worker::childStarted(GoalPtr goal, const std::set<int> & fds,
     child.inBuildSlot = inBuildSlot;
     child.respectTimeouts = respectTimeouts;
     children.emplace_back(child);
-    if (inBuildSlot) nrLocalBuilds++;
+    if (inBuildSlot) {
+        if (goal->jobCategory() == JobCategory::Substitution) nrSubstitutions++;
+        else nrLocalBuilds++;
+    }
 }


@@ -198,8 +208,13 @@ void Worker::childTerminated(Goal * goal, bool wakeSleepers)
     if (i == children.end()) return;

     if (i->inBuildSlot) {
-        assert(nrLocalBuilds > 0);
-        nrLocalBuilds--;
+        if (goal->jobCategory() == JobCategory::Substitution) {
+            assert(nrSubstitutions > 0);
+            nrSubstitutions--;
+        } else {
+            assert(nrLocalBuilds > 0);
+            nrLocalBuilds--;
+        }
     }

     children.erase(i);
@@ -220,7 +235,9 @@ void Worker::childTerminated(Goal * goal, bool wakeSleepers)
 void Worker::waitForBuildSlot(GoalPtr goal)
 {
     debug("wait for build slot");
-    if (getNrLocalBuilds() < settings.maxBuildJobs)
+    bool isSubstitutionGoal = goal->jobCategory() == JobCategory::Substitution;
+    if ((!isSubstitutionGoal && getNrLocalBuilds() < settings.maxBuildJobs) ||
+        (isSubstitutionGoal && getNrSubstitutions() < settings.maxSubstitutionJobs))
         wakeUp(goal); /* we can do it right away */
     else
         addToWeakGoals(wantingToBuild, goal);
@@ -88,11 +88,16 @@ private:
     std::list<Child> children;

     /**
-     * Number of build slots occupied. This includes local builds and
-     * substitutions but not remote builds via the build hook.
+     * Number of build slots occupied. This includes local builds but does not
+     * include substitutions or remote builds via the build hook.
     */
    unsigned int nrLocalBuilds;

+    /**
+     * Number of substitution slots occupied.
+     */
+    unsigned int nrSubstitutions;
+
    /**
     * Maps used to prevent multiple instantiations of a goal for the
     * same derivation / path.
@@ -220,12 +225,16 @@ public:
    void wakeUp(GoalPtr goal);

    /**
-     * Return the number of local build and substitution processes
-     * currently running (but not remote builds via the build
-     * hook).
+     * Return the number of local build processes currently running (but not
+     * remote builds via the build hook).
     */
    unsigned int getNrLocalBuilds();

+    /**
+     * Return the number of substitution processes currently running.
+     */
+    unsigned int getNrSubstitutions();
+
    /**
     * Registers a running child process. `inBuildSlot` means that
     * the process counts towards the jobs limit.
@@ -21,6 +21,27 @@ std::string makeFileIngestionPrefix(FileIngestionMethod m)
     }
 }

+std::string ContentAddressMethod::renderPrefix() const
+{
+    return std::visit(overloaded {
+        [](TextIngestionMethod) -> std::string { return "text:"; },
+        [](FileIngestionMethod m2) {
+            /* Not prefixed for back compat with things that couldn't produce text before. */
+            return makeFileIngestionPrefix(m2);
+        },
+    }, raw);
+}
+
+ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m)
+{
+    ContentAddressMethod method = FileIngestionMethod::Flat;
+    if (splitPrefix(m, "r:"))
+        method = FileIngestionMethod::Recursive;
+    else if (splitPrefix(m, "text:"))
+        method = TextIngestionMethod {};
+    return method;
+}
+
 std::string ContentAddress::render() const
 {
     return std::visit(overloaded {
@@ -36,14 +57,14 @@ std::string ContentAddress::render() const
     }, raw);
 }

-std::string ContentAddressMethod::render() const
+std::string ContentAddressMethod::render(HashType ht) const
 {
     return std::visit(overloaded {
-        [](const TextHashMethod & th) {
-            return std::string{"text:"} + printHashType(htSHA256);
+        [&](const TextIngestionMethod & th) {
+            return std::string{"text:"} + printHashType(ht);
         },
-        [](const FixedOutputHashMethod & fshm) {
-            return "fixed:" + makeFileIngestionPrefix(fshm.fileIngestionMethod) + printHashType(fshm.hashType);
+        [&](const FileIngestionMethod & fim) {
+            return "fixed:" + makeFileIngestionPrefix(fim) + printHashType(ht);
         }
     }, raw);
 }
@@ -51,7 +72,7 @@ std::string ContentAddressMethod::render() const
 /**
  * Parses content address strings up to the hash.
  */
-static ContentAddressMethod parseContentAddressMethodPrefix(std::string_view & rest)
+static std::pair<ContentAddressMethod, HashType> parseContentAddressMethodPrefix(std::string_view & rest)
 {
     std::string_view wholeInput { rest };

@@ -75,46 +96,47 @@ static ContentAddressMethod parseContentAddressMethodPrefix(std::string_view & r
     if (prefix == "text") {
         // No parsing of the ingestion method, "text" only support flat.
         HashType hashType = parseHashType_();
-        if (hashType != htSHA256)
-            throw Error("text content address hash should use %s, but instead uses %s",
-                printHashType(htSHA256), printHashType(hashType));
-        return TextHashMethod {};
+        return {
+            TextIngestionMethod {},
+            std::move(hashType),
+        };
     } else if (prefix == "fixed") {
         // Parse method
         auto method = FileIngestionMethod::Flat;
         if (splitPrefix(rest, "r:"))
             method = FileIngestionMethod::Recursive;
         HashType hashType = parseHashType_();
-        return FixedOutputHashMethod {
-            .fileIngestionMethod = method,
-            .hashType = std::move(hashType),
+        return {
+            std::move(method),
+            std::move(hashType),
         };
     } else
         throw UsageError("content address prefix '%s' is unrecognized. Recogonized prefixes are 'text' or 'fixed'", prefix);
 }

-ContentAddress ContentAddress::parse(std::string_view rawCa) {
+ContentAddress ContentAddress::parse(std::string_view rawCa)
+{
     auto rest = rawCa;

-    ContentAddressMethod caMethod = parseContentAddressMethodPrefix(rest);
+    auto [caMethod, hashType_] = parseContentAddressMethodPrefix(rest);
+    auto hashType = hashType_; // work around clang bug

-    return std::visit(
-        overloaded {
-            [&](TextHashMethod & thm) {
-                return ContentAddress(TextHash {
-                    .hash = Hash::parseNonSRIUnprefixed(rest, htSHA256)
-                });
-            },
-            [&](FixedOutputHashMethod & fohMethod) {
-                return ContentAddress(FixedOutputHash {
-                    .method = fohMethod.fileIngestionMethod,
-                    .hash = Hash::parseNonSRIUnprefixed(rest, std::move(fohMethod.hashType)),
-                });
-            },
-        }, caMethod.raw);
+    return std::visit(overloaded {
+        [&](TextIngestionMethod &) {
+            return ContentAddress(TextHash {
+                .hash = Hash::parseNonSRIUnprefixed(rest, hashType)
+            });
+        },
+        [&](FileIngestionMethod & fim) {
+            return ContentAddress(FixedOutputHash {
+                .method = fim,
+                .hash = Hash::parseNonSRIUnprefixed(rest, hashType),
+            });
+        },
+    }, caMethod.raw);
 }

-ContentAddressMethod ContentAddressMethod::parse(std::string_view caMethod)
+std::pair<ContentAddressMethod, HashType> ContentAddressMethod::parse(std::string_view caMethod)
 {
     std::string asPrefix = std::string{caMethod} + ":";
     // parseContentAddressMethodPrefix takes its argument by reference
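`ContentAddressMethod::parse` now hands back the hash type alongside the method, and `renderPrefix`/`parsePrefix` handle the short `r:`/`text:` form. A sketch of the expected round trip, assuming daemon-protocol style method strings such as `fixed:r:sha256` (illustrative only, not part of the commit):

```cpp
#include <cassert>
#include "content-address.hh"

void methodParseSketch()
{
    using namespace nix;

    // "fixed:r:sha256" → recursive file ingestion hashed with SHA-256.
    auto [method, hashType] = ContentAddressMethod::parse("fixed:r:sha256");
    assert(method == ContentAddressMethod { FileIngestionMethod::Recursive });
    assert(hashType == htSHA256);

    // The short prefix deliberately omits "fixed:" for backwards compatibility:
    // "r:" for recursive, "" for flat, "text:" for text ingestion.
    assert(method.renderPrefix() == "r:");
}
```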
@@ -134,6 +156,36 @@ std::string renderContentAddress(std::optional<ContentAddress> ca)
     return ca ? ca->render() : "";
 }

+ContentAddress ContentAddress::fromParts(
+    ContentAddressMethod method, Hash hash) noexcept
+{
+    return std::visit(overloaded {
+        [&](TextIngestionMethod _) -> ContentAddress {
+            return TextHash {
+                .hash = std::move(hash),
+            };
+        },
+        [&](FileIngestionMethod m2) -> ContentAddress {
+            return FixedOutputHash {
+                .method = std::move(m2),
+                .hash = std::move(hash),
+            };
+        },
+    }, method.raw);
+}
+
+ContentAddressMethod ContentAddress::getMethod() const
+{
+    return std::visit(overloaded {
+        [](const TextHash & th) -> ContentAddressMethod {
+            return TextIngestionMethod {};
+        },
+        [](const FixedOutputHash & fsh) -> ContentAddressMethod {
+            return fsh.method;
+        },
+    }, raw);
+}
+
 const Hash & ContentAddress::getHash() const
 {
     return std::visit(overloaded {
@@ -146,6 +198,12 @@ const Hash & ContentAddress::getHash() const
     }, raw);
 }

+std::string ContentAddress::printMethodAlgo() const
+{
+    return getMethod().renderPrefix()
+        + printHashType(getHash().type);
+}
+
 bool StoreReferences::empty() const
 {
     return !self && others.empty();
@@ -156,7 +214,8 @@ size_t StoreReferences::size() const
     return (self ? 1 : 0) + others.size();
 }

-ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const ContentAddress & ca) {
+ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const ContentAddress & ca) noexcept
+{
     return std::visit(overloaded {
         [&](const TextHash & h) -> ContentAddressWithReferences {
             return TextInfo {
|
@ -173,4 +232,56 @@ ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const Con
|
||||||
}, ca.raw);
|
}, ca.raw);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
std::optional<ContentAddressWithReferences> ContentAddressWithReferences::fromPartsOpt(
|
||||||
|
ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept
|
||||||
|
{
|
||||||
|
return std::visit(overloaded {
|
||||||
|
[&](TextIngestionMethod _) -> std::optional<ContentAddressWithReferences> {
|
||||||
|
if (refs.self)
|
||||||
|
return std::nullopt;
|
||||||
|
return ContentAddressWithReferences {
|
||||||
|
TextInfo {
|
||||||
|
.hash = { .hash = std::move(hash) },
|
||||||
|
.references = std::move(refs.others),
|
||||||
|
}
|
||||||
|
};
|
||||||
|
},
|
||||||
|
[&](FileIngestionMethod m2) -> std::optional<ContentAddressWithReferences> {
|
||||||
|
return ContentAddressWithReferences {
|
||||||
|
FixedOutputInfo {
|
||||||
|
.hash = {
|
||||||
|
.method = m2,
|
||||||
|
.hash = std::move(hash),
|
||||||
|
},
|
||||||
|
.references = std::move(refs),
|
||||||
|
}
|
||||||
|
};
|
||||||
|
},
|
||||||
|
}, method.raw);
|
||||||
|
}
|
||||||
|
|
||||||
|
ContentAddressMethod ContentAddressWithReferences::getMethod() const
|
||||||
|
{
|
||||||
|
return std::visit(overloaded {
|
||||||
|
[](const TextInfo & th) -> ContentAddressMethod {
|
||||||
|
return TextIngestionMethod {};
|
||||||
|
},
|
||||||
|
[](const FixedOutputInfo & fsh) -> ContentAddressMethod {
|
||||||
|
return fsh.hash.method;
|
||||||
|
},
|
||||||
|
}, raw);
|
||||||
|
}
|
||||||
|
|
||||||
|
Hash ContentAddressWithReferences::getHash() const
|
||||||
|
{
|
||||||
|
return std::visit(overloaded {
|
||||||
|
[](const TextInfo & th) {
|
||||||
|
return th.hash.hash;
|
||||||
|
},
|
||||||
|
[](const FixedOutputInfo & fsh) {
|
||||||
|
return fsh.hash.hash;
|
||||||
|
},
|
||||||
|
}, raw);
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -21,8 +21,14 @@ namespace nix {
|
||||||
*
|
*
|
||||||
* Somewhat obscure, used by \ref Derivation derivations and
|
* Somewhat obscure, used by \ref Derivation derivations and
|
||||||
* `builtins.toFile` currently.
|
* `builtins.toFile` currently.
|
||||||
|
*
|
||||||
|
* TextIngestionMethod is identical to FileIngestionMethod::Fixed except that
|
||||||
|
* the former may not have self-references and is tagged `text:${algo}:${hash}`
|
||||||
|
* rather than `fixed:${algo}:${hash}`. The contents of the store path are
|
||||||
|
* ingested and hashed identically, aside from the slightly different tag and
|
||||||
|
* restriction on self-references.
|
||||||
*/
|
*/
|
||||||
struct TextHashMethod : std::monostate { };
|
struct TextIngestionMethod : std::monostate { };
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* An enumeration of the main ways we can serialize file system
|
* An enumeration of the main ways we can serialize file system
|
||||||
|
@ -46,13 +52,6 @@ enum struct FileIngestionMethod : uint8_t {
|
||||||
*/
|
*/
|
||||||
std::string makeFileIngestionPrefix(FileIngestionMethod m);
|
std::string makeFileIngestionPrefix(FileIngestionMethod m);
|
||||||
|
|
||||||
struct FixedOutputHashMethod {
|
|
||||||
FileIngestionMethod fileIngestionMethod;
|
|
||||||
HashType hashType;
|
|
||||||
|
|
||||||
GENERATE_CMP(FixedOutputHashMethod, me->fileIngestionMethod, me->hashType);
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* An enumeration of all the ways we can serialize file system objects.
|
* An enumeration of all the ways we can serialize file system objects.
|
||||||
*
|
*
|
||||||
|
@ -64,8 +63,8 @@ struct FixedOutputHashMethod {
|
||||||
struct ContentAddressMethod
|
struct ContentAddressMethod
|
||||||
{
|
{
|
||||||
typedef std::variant<
|
typedef std::variant<
|
||||||
TextHashMethod,
|
TextIngestionMethod,
|
||||||
FixedOutputHashMethod
|
FileIngestionMethod
|
||||||
> Raw;
|
> Raw;
|
||||||
|
|
||||||
Raw raw;
|
Raw raw;
|
||||||
|
@ -77,9 +76,36 @@ struct ContentAddressMethod
|
||||||
: raw(std::forward<decltype(arg)>(arg)...)
|
: raw(std::forward<decltype(arg)>(arg)...)
|
||||||
{ }
|
{ }
|
||||||
|
|
||||||
static ContentAddressMethod parse(std::string_view rawCaMethod);
|
|
||||||
|
|
||||||
std::string render() const;
|
/**
|
||||||
|
* Parse the prefix tag which indicates how the files
|
||||||
|
* were ingested, with the fixed output case not prefixed for back
|
||||||
|
* compat.
|
||||||
|
*
|
||||||
|
* @param [in] m A string that should begin with the prefix.
|
||||||
|
* @param [out] m The remainder of the string after the prefix.
|
||||||
|
*/
|
||||||
|
static ContentAddressMethod parsePrefix(std::string_view & m);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render the prefix tag which indicates how the files were ingested.
|
||||||
|
*
|
||||||
|
* The rough inverse of `parsePrefix()`.
|
||||||
|
*/
|
||||||
|
std::string renderPrefix() const;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a content addressing method and hash type.
|
||||||
|
*/
|
||||||
|
static std::pair<ContentAddressMethod, HashType> parse(std::string_view rawCaMethod);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render a content addressing method and hash type in a
|
||||||
|
* nicer way, prefixing both cases.
|
||||||
|
*
|
||||||
|
* The rough inverse of `parse()`.
|
||||||
|
*/
|
||||||
|
std::string render(HashType ht) const;
|
||||||
};
|
};
|
||||||
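// Hypothetical round trip, shown only to illustrate the split API above (the
// literal method string is an assumption, not taken from this commit):
// `parse()` yields the method together with the hash type, and `render(ht)`
// is its rough inverse.
auto [method, ht] = ContentAddressMethod::parse("fixed:r:sha256");
// ht == htSHA256
// method.renderPrefix() == "r:"
// method.render(ht)     == "fixed:r:sha256"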
|
|
||||||
|
|
||||||
|
@ -147,8 +173,9 @@ struct ContentAddress
|
||||||
{ }
|
{ }
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Compute the content-addressability assertion (ValidPathInfo::ca) for
|
* Compute the content-addressability assertion
|
||||||
* paths created by Store::makeFixedOutputPath() / Store::addToStore().
|
* (`ValidPathInfo::ca`) for paths created by
|
||||||
|
* `Store::makeFixedOutputPath()` / `Store::addToStore()`.
|
||||||
*/
|
*/
|
||||||
std::string render() const;
|
std::string render() const;
|
||||||
|
|
||||||
|
@ -156,9 +183,27 @@ struct ContentAddress
|
||||||
|
|
||||||
static std::optional<ContentAddress> parseOpt(std::string_view rawCaOpt);
|
static std::optional<ContentAddress> parseOpt(std::string_view rawCaOpt);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a `ContentAddress` from 2 parts:
|
||||||
|
*
|
||||||
|
* @param method Way of ingesting the file system data.
|
||||||
|
*
|
||||||
|
* @param hash Hash of ingested file system data.
|
||||||
|
*/
|
||||||
|
static ContentAddress fromParts(
|
||||||
|
ContentAddressMethod method, Hash hash) noexcept;
|
||||||
|
|
||||||
|
ContentAddressMethod getMethod() const;
|
||||||
|
|
||||||
const Hash & getHash() const;
|
const Hash & getHash() const;
|
||||||
|
|
||||||
|
std::string printMethodAlgo() const;
|
||||||
};
|
};
|
||||||
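// Minimal usage sketch (illustration only, not code from this commit):
// `fromParts` pairs an ingestion method with a hash, and the text and fixed
// cases then render with the two prefixes described earlier in this header.
Hash h = hashString(htSHA256, "example");
auto text  = ContentAddress::fromParts(TextIngestionMethod {}, h);
auto fixed = ContentAddress::fromParts(FileIngestionMethod::Recursive, h);
// text.render()           -> "text:sha256:<base32 digest>"
// fixed.render()          -> "fixed:r:sha256:<base32 digest>"
// fixed.printMethodAlgo() == "r:sha256"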
|
|
||||||
|
/**
|
||||||
|
* Render the `ContentAddress` if it exists to a string, return empty
|
||||||
|
* string otherwise.
|
||||||
|
*/
|
||||||
std::string renderContentAddress(std::optional<ContentAddress> ca);
|
std::string renderContentAddress(std::optional<ContentAddress> ca);
|
||||||
|
|
||||||
|
|
||||||
|
@ -244,10 +289,29 @@ struct ContentAddressWithReferences
|
||||||
{ }
|
{ }
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Create a ContentAddressWithReferences from a mere ContentAddress, by
|
* Create a `ContentAddressWithReferences` from a mere
|
||||||
* assuming no references in all cases.
|
* `ContentAddress`, by claiming no references.
|
||||||
*/
|
*/
|
||||||
static ContentAddressWithReferences withoutRefs(const ContentAddress &);
|
static ContentAddressWithReferences withoutRefs(const ContentAddress &) noexcept;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a `ContentAddressWithReferences` from 3 parts:
|
||||||
|
*
|
||||||
|
* @param method Way of ingesting the file system data.
|
||||||
|
*
|
||||||
|
* @param hash Hash of ingested file system data.
|
||||||
|
*
|
||||||
|
* @param refs References to other store objects or oneself.
|
||||||
|
*
|
||||||
|
* Do note that not all combinations are supported; `nullopt` is
|
||||||
|
* returned for invalid combinations.
|
||||||
|
*/
|
||||||
|
static std::optional<ContentAddressWithReferences> fromPartsOpt(
|
||||||
|
ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept;
|
||||||
|
|
||||||
|
ContentAddressMethod getMethod() const;
|
||||||
|
|
||||||
|
Hash getHash() const;
|
||||||
};
|
};
|
||||||
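// Sketch of the invalid combination mentioned above (assumed example values,
// not code from this commit): text ingestion cannot carry a self-reference,
// so `fromPartsOpt` returns `std::nullopt` for that pairing and a value for
// the fixed-output case.
Hash h = hashString(htSHA256, "example");
StoreReferences selfRef { .self = true };
auto rejected = ContentAddressWithReferences::fromPartsOpt(TextIngestionMethod {}, h, selfRef);   // std::nullopt
auto accepted = ContentAddressWithReferences::fromPartsOpt(FileIngestionMethod::Recursive, h, selfRef);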
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -401,18 +401,22 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
||||||
logger->startWork();
|
logger->startWork();
|
||||||
auto pathInfo = [&]() {
|
auto pathInfo = [&]() {
|
||||||
// NB: FramedSource must be out of scope before logger->stopWork();
|
// NB: FramedSource must be out of scope before logger->stopWork();
|
||||||
ContentAddressMethod contentAddressMethod = ContentAddressMethod::parse(camStr);
|
auto [contentAddressMethod, hashType_] = ContentAddressMethod::parse(camStr);
|
||||||
|
auto hashType = hashType_; // work around clang bug
|
||||||
FramedSource source(from);
|
FramedSource source(from);
|
||||||
// TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
|
// TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
|
||||||
return std::visit(overloaded {
|
return std::visit(overloaded {
|
||||||
[&](const TextHashMethod &) {
|
[&](const TextIngestionMethod &) {
|
||||||
|
if (hashType != htSHA256)
|
||||||
|
throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
|
||||||
|
name, printHashType(hashType));
|
||||||
// We could stream this by changing Store
|
// We could stream this by changing Store
|
||||||
std::string contents = source.drain();
|
std::string contents = source.drain();
|
||||||
auto path = store->addTextToStore(name, contents, refs, repair);
|
auto path = store->addTextToStore(name, contents, refs, repair);
|
||||||
return store->queryPathInfo(path);
|
return store->queryPathInfo(path);
|
||||||
},
|
},
|
||||||
[&](const FixedOutputHashMethod & fohm) {
|
[&](const FileIngestionMethod & fim) {
|
||||||
auto path = store->addToStoreFromDump(source, name, fohm.fileIngestionMethod, fohm.hashType, repair, refs);
|
auto path = store->addToStoreFromDump(source, name, fim, hashType, repair, refs);
|
||||||
return store->queryPathInfo(path);
|
return store->queryPathInfo(path);
|
||||||
},
|
},
|
||||||
}, contentAddressMethod.raw);
|
}, contentAddressMethod.raw);
|
||||||
|
|
|
@ -1,7 +1,9 @@
|
||||||
#include "derivations.hh"
|
#include "derivations.hh"
|
||||||
|
#include "downstream-placeholder.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
#include "globals.hh"
|
#include "globals.hh"
|
||||||
#include "util.hh"
|
#include "util.hh"
|
||||||
|
#include "split.hh"
|
||||||
#include "worker-protocol.hh"
|
#include "worker-protocol.hh"
|
||||||
#include "fs-accessor.hh"
|
#include "fs-accessor.hh"
|
||||||
#include <boost/container/small_vector.hpp>
|
#include <boost/container/small_vector.hpp>
|
||||||
|
@ -35,9 +37,9 @@ std::optional<StorePath> DerivationOutput::path(const Store & store, std::string
|
||||||
|
|
||||||
StorePath DerivationOutput::CAFixed::path(const Store & store, std::string_view drvName, std::string_view outputName) const
|
StorePath DerivationOutput::CAFixed::path(const Store & store, std::string_view drvName, std::string_view outputName) const
|
||||||
{
|
{
|
||||||
return store.makeFixedOutputPath(
|
return store.makeFixedOutputPathFromCA(
|
||||||
outputPathName(drvName, outputName),
|
outputPathName(drvName, outputName),
|
||||||
{ hash, {} });
|
ContentAddressWithReferences::withoutRefs(ca));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -211,29 +213,27 @@ static StringSet parseStrings(std::istream & str, bool arePaths)
|
||||||
|
|
||||||
|
|
||||||
static DerivationOutput parseDerivationOutput(const Store & store,
|
static DerivationOutput parseDerivationOutput(const Store & store,
|
||||||
std::string_view pathS, std::string_view hashAlgo, std::string_view hash)
|
std::string_view pathS, std::string_view hashAlgo, std::string_view hashS)
|
||||||
{
|
{
|
||||||
if (hashAlgo != "") {
|
if (hashAlgo != "") {
|
||||||
auto method = FileIngestionMethod::Flat;
|
ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgo);
|
||||||
if (hashAlgo.substr(0, 2) == "r:") {
|
if (method == TextIngestionMethod {})
|
||||||
method = FileIngestionMethod::Recursive;
|
experimentalFeatureSettings.require(Xp::DynamicDerivations);
|
||||||
hashAlgo = hashAlgo.substr(2);
|
|
||||||
}
|
|
||||||
const auto hashType = parseHashType(hashAlgo);
|
const auto hashType = parseHashType(hashAlgo);
|
||||||
if (hash == "impure") {
|
if (hashS == "impure") {
|
||||||
experimentalFeatureSettings.require(Xp::ImpureDerivations);
|
experimentalFeatureSettings.require(Xp::ImpureDerivations);
|
||||||
assert(pathS == "");
|
assert(pathS == "");
|
||||||
return DerivationOutput::Impure {
|
return DerivationOutput::Impure {
|
||||||
.method = std::move(method),
|
.method = std::move(method),
|
||||||
.hashType = std::move(hashType),
|
.hashType = std::move(hashType),
|
||||||
};
|
};
|
||||||
} else if (hash != "") {
|
} else if (hashS != "") {
|
||||||
validatePath(pathS);
|
validatePath(pathS);
|
||||||
|
auto hash = Hash::parseNonSRIUnprefixed(hashS, hashType);
|
||||||
return DerivationOutput::CAFixed {
|
return DerivationOutput::CAFixed {
|
||||||
.hash = FixedOutputHash {
|
.ca = ContentAddress::fromParts(
|
||||||
.method = std::move(method),
|
std::move(method),
|
||||||
.hash = Hash::parseNonSRIUnprefixed(hash, hashType),
|
std::move(hash)),
|
||||||
},
|
|
||||||
};
|
};
|
||||||
} else {
|
} else {
|
||||||
experimentalFeatureSettings.require(Xp::CaDerivations);
|
experimentalFeatureSettings.require(Xp::CaDerivations);
|
||||||
|
@ -393,12 +393,12 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs,
|
||||||
},
|
},
|
||||||
[&](const DerivationOutput::CAFixed & dof) {
|
[&](const DerivationOutput::CAFixed & dof) {
|
||||||
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first)));
|
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first)));
|
||||||
s += ','; printUnquotedString(s, dof.hash.printMethodAlgo());
|
s += ','; printUnquotedString(s, dof.ca.printMethodAlgo());
|
||||||
s += ','; printUnquotedString(s, dof.hash.hash.to_string(Base16, false));
|
s += ','; printUnquotedString(s, dof.ca.getHash().to_string(Base16, false));
|
||||||
},
|
},
|
||||||
[&](const DerivationOutput::CAFloating & dof) {
|
[&](const DerivationOutput::CAFloating & dof) {
|
||||||
s += ','; printUnquotedString(s, "");
|
s += ','; printUnquotedString(s, "");
|
||||||
s += ','; printUnquotedString(s, makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
|
s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashType(dof.hashType));
|
||||||
s += ','; printUnquotedString(s, "");
|
s += ','; printUnquotedString(s, "");
|
||||||
},
|
},
|
||||||
[&](const DerivationOutput::Deferred &) {
|
[&](const DerivationOutput::Deferred &) {
|
||||||
|
@ -409,7 +409,7 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs,
|
||||||
[&](const DerivationOutputImpure & doi) {
|
[&](const DerivationOutputImpure & doi) {
|
||||||
// FIXME
|
// FIXME
|
||||||
s += ','; printUnquotedString(s, "");
|
s += ','; printUnquotedString(s, "");
|
||||||
s += ','; printUnquotedString(s, makeFileIngestionPrefix(doi.method) + printHashType(doi.hashType));
|
s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashType(doi.hashType));
|
||||||
s += ','; printUnquotedString(s, "impure");
|
s += ','; printUnquotedString(s, "impure");
|
||||||
}
|
}
|
||||||
}, i.second.raw());
|
}, i.second.raw());
|
||||||
|
@ -626,8 +626,8 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
|
||||||
for (const auto & i : drv.outputs) {
|
for (const auto & i : drv.outputs) {
|
||||||
auto & dof = std::get<DerivationOutput::CAFixed>(i.second.raw());
|
auto & dof = std::get<DerivationOutput::CAFixed>(i.second.raw());
|
||||||
auto hash = hashString(htSHA256, "fixed:out:"
|
auto hash = hashString(htSHA256, "fixed:out:"
|
||||||
+ dof.hash.printMethodAlgo() + ":"
|
+ dof.ca.printMethodAlgo() + ":"
|
||||||
+ dof.hash.hash.to_string(Base16, false) + ":"
|
+ dof.ca.getHash().to_string(Base16, false) + ":"
|
||||||
+ store.printStorePath(dof.path(store, drv.name, i.first)));
|
+ store.printStorePath(dof.path(store, drv.name, i.first)));
|
||||||
outputHashes.insert_or_assign(i.first, std::move(hash));
|
outputHashes.insert_or_assign(i.first, std::move(hash));
|
||||||
}
|
}
|
||||||
|
@ -777,12 +777,12 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
|
||||||
},
|
},
|
||||||
[&](const DerivationOutput::CAFixed & dof) {
|
[&](const DerivationOutput::CAFixed & dof) {
|
||||||
out << store.printStorePath(dof.path(store, drv.name, i.first))
|
out << store.printStorePath(dof.path(store, drv.name, i.first))
|
||||||
<< dof.hash.printMethodAlgo()
|
<< dof.ca.printMethodAlgo()
|
||||||
<< dof.hash.hash.to_string(Base16, false);
|
<< dof.ca.getHash().to_string(Base16, false);
|
||||||
},
|
},
|
||||||
[&](const DerivationOutput::CAFloating & dof) {
|
[&](const DerivationOutput::CAFloating & dof) {
|
||||||
out << ""
|
out << ""
|
||||||
<< (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
|
<< (dof.method.renderPrefix() + printHashType(dof.hashType))
|
||||||
<< "";
|
<< "";
|
||||||
},
|
},
|
||||||
[&](const DerivationOutput::Deferred &) {
|
[&](const DerivationOutput::Deferred &) {
|
||||||
|
@ -792,7 +792,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
|
||||||
},
|
},
|
||||||
[&](const DerivationOutput::Impure & doi) {
|
[&](const DerivationOutput::Impure & doi) {
|
||||||
out << ""
|
out << ""
|
||||||
<< (makeFileIngestionPrefix(doi.method) + printHashType(doi.hashType))
|
<< (doi.method.renderPrefix() + printHashType(doi.hashType))
|
||||||
<< "impure";
|
<< "impure";
|
||||||
},
|
},
|
||||||
}, i.second.raw());
|
}, i.second.raw());
|
||||||
|
@ -811,13 +811,7 @@ std::string hashPlaceholder(const std::string_view outputName)
|
||||||
return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(Base32, false);
|
return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(Base32, false);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName)
|
|
||||||
{
|
|
||||||
auto drvNameWithExtension = drvPath.name();
|
|
||||||
auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4);
|
|
||||||
auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName);
|
|
||||||
return "/" + hashString(htSHA256, clearText).to_string(Base32, false);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites)
|
static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites)
|
||||||
|
@ -881,7 +875,7 @@ std::optional<BasicDerivation> Derivation::tryResolve(
|
||||||
for (auto & outputName : inputOutputs) {
|
for (auto & outputName : inputOutputs) {
|
||||||
if (auto actualPath = get(inputDrvOutputs, { inputDrv, outputName })) {
|
if (auto actualPath = get(inputDrvOutputs, { inputDrv, outputName })) {
|
||||||
inputRewrites.emplace(
|
inputRewrites.emplace(
|
||||||
downstreamPlaceholder(store, inputDrv, outputName),
|
DownstreamPlaceholder::unknownCaOutput(inputDrv, outputName).render(),
|
||||||
store.printStorePath(*actualPath));
|
store.printStorePath(*actualPath));
|
||||||
resolved.inputSrcs.insert(*actualPath);
|
resolved.inputSrcs.insert(*actualPath);
|
||||||
} else {
|
} else {
|
||||||
|
@ -942,7 +936,7 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const
|
||||||
envHasRightPath(doia.path, i.first);
|
envHasRightPath(doia.path, i.first);
|
||||||
},
|
},
|
||||||
[&](const DerivationOutput::CAFixed & dof) {
|
[&](const DerivationOutput::CAFixed & dof) {
|
||||||
StorePath path = store.makeFixedOutputPath(drvName, { dof.hash, {} });
|
auto path = dof.path(store, drvName, i.first);
|
||||||
envHasRightPath(path, i.first);
|
envHasRightPath(path, i.first);
|
||||||
},
|
},
|
||||||
[&](const DerivationOutput::CAFloating &) {
|
[&](const DerivationOutput::CAFloating &) {
|
||||||
|
@ -971,15 +965,16 @@ nlohmann::json DerivationOutput::toJSON(
|
||||||
},
|
},
|
||||||
[&](const DerivationOutput::CAFixed & dof) {
|
[&](const DerivationOutput::CAFixed & dof) {
|
||||||
res["path"] = store.printStorePath(dof.path(store, drvName, outputName));
|
res["path"] = store.printStorePath(dof.path(store, drvName, outputName));
|
||||||
res["hashAlgo"] = dof.hash.printMethodAlgo();
|
res["hashAlgo"] = dof.ca.printMethodAlgo();
|
||||||
res["hash"] = dof.hash.hash.to_string(Base16, false);
|
res["hash"] = dof.ca.getHash().to_string(Base16, false);
|
||||||
|
// FIXME print refs?
|
||||||
},
|
},
|
||||||
[&](const DerivationOutput::CAFloating & dof) {
|
[&](const DerivationOutput::CAFloating & dof) {
|
||||||
res["hashAlgo"] = makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType);
|
res["hashAlgo"] = dof.method.renderPrefix() + printHashType(dof.hashType);
|
||||||
},
|
},
|
||||||
[&](const DerivationOutput::Deferred &) {},
|
[&](const DerivationOutput::Deferred &) {},
|
||||||
[&](const DerivationOutput::Impure & doi) {
|
[&](const DerivationOutput::Impure & doi) {
|
||||||
res["hashAlgo"] = makeFileIngestionPrefix(doi.method) + printHashType(doi.hashType);
|
res["hashAlgo"] = doi.method.renderPrefix() + printHashType(doi.hashType);
|
||||||
res["impure"] = true;
|
res["impure"] = true;
|
||||||
},
|
},
|
||||||
}, raw());
|
}, raw());
|
||||||
|
@ -998,15 +993,15 @@ DerivationOutput DerivationOutput::fromJSON(
|
||||||
for (const auto & [key, _] : json)
|
for (const auto & [key, _] : json)
|
||||||
keys.insert(key);
|
keys.insert(key);
|
||||||
|
|
||||||
auto methodAlgo = [&]() -> std::pair<FileIngestionMethod, HashType> {
|
auto methodAlgo = [&]() -> std::pair<ContentAddressMethod, HashType> {
|
||||||
std::string hashAlgo = json["hashAlgo"];
|
std::string hashAlgo = json["hashAlgo"];
|
||||||
auto method = FileIngestionMethod::Flat;
|
// remaining to parse, will be mutated by parsers
|
||||||
if (hashAlgo.substr(0, 2) == "r:") {
|
std::string_view s = hashAlgo;
|
||||||
method = FileIngestionMethod::Recursive;
|
ContentAddressMethod method = ContentAddressMethod::parsePrefix(s);
|
||||||
hashAlgo = hashAlgo.substr(2);
|
if (method == TextIngestionMethod {})
|
||||||
}
|
xpSettings.require(Xp::DynamicDerivations);
|
||||||
auto hashType = parseHashType(hashAlgo);
|
auto hashType = parseHashType(s);
|
||||||
return { method, hashType };
|
return { std::move(method), std::move(hashType) };
|
||||||
};
|
};
|
||||||
|
|
||||||
if (keys == (std::set<std::string_view> { "path" })) {
|
if (keys == (std::set<std::string_view> { "path" })) {
|
||||||
|
@ -1018,10 +1013,9 @@ DerivationOutput DerivationOutput::fromJSON(
|
||||||
else if (keys == (std::set<std::string_view> { "path", "hashAlgo", "hash" })) {
|
else if (keys == (std::set<std::string_view> { "path", "hashAlgo", "hash" })) {
|
||||||
auto [method, hashType] = methodAlgo();
|
auto [method, hashType] = methodAlgo();
|
||||||
auto dof = DerivationOutput::CAFixed {
|
auto dof = DerivationOutput::CAFixed {
|
||||||
.hash = {
|
.ca = ContentAddress::fromParts(
|
||||||
.method = method,
|
std::move(method),
|
||||||
.hash = Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashType),
|
Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashType)),
|
||||||
},
|
|
||||||
};
|
};
|
||||||
if (dof.path(store, drvName, outputName) != store.parseStorePath((std::string) json["path"]))
|
if (dof.path(store, drvName, outputName) != store.parseStorePath((std::string) json["path"]))
|
||||||
throw Error("Path doesn't match derivation output");
|
throw Error("Path doesn't match derivation output");
|
||||||
|
@ -1032,8 +1026,8 @@ DerivationOutput DerivationOutput::fromJSON(
|
||||||
xpSettings.require(Xp::CaDerivations);
|
xpSettings.require(Xp::CaDerivations);
|
||||||
auto [method, hashType] = methodAlgo();
|
auto [method, hashType] = methodAlgo();
|
||||||
return DerivationOutput::CAFloating {
|
return DerivationOutput::CAFloating {
|
||||||
.method = method,
|
.method = std::move(method),
|
||||||
.hashType = hashType,
|
.hashType = std::move(hashType),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1045,7 +1039,7 @@ DerivationOutput DerivationOutput::fromJSON(
|
||||||
xpSettings.require(Xp::ImpureDerivations);
|
xpSettings.require(Xp::ImpureDerivations);
|
||||||
auto [method, hashType] = methodAlgo();
|
auto [method, hashType] = methodAlgo();
|
||||||
return DerivationOutput::Impure {
|
return DerivationOutput::Impure {
|
||||||
.method = method,
|
.method = std::move(method),
|
||||||
.hashType = hashType,
|
.hashType = hashType,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
|
@ -6,6 +6,7 @@
|
||||||
#include "hash.hh"
|
#include "hash.hh"
|
||||||
#include "content-address.hh"
|
#include "content-address.hh"
|
||||||
#include "repair-flag.hh"
|
#include "repair-flag.hh"
|
||||||
|
#include "derived-path.hh"
|
||||||
#include "sync.hh"
|
#include "sync.hh"
|
||||||
#include "comparator.hh"
|
#include "comparator.hh"
|
||||||
|
|
||||||
|
@ -36,9 +37,11 @@ struct DerivationOutputInputAddressed
|
||||||
struct DerivationOutputCAFixed
|
struct DerivationOutputCAFixed
|
||||||
{
|
{
|
||||||
/**
|
/**
|
||||||
* hash used for expected hash computation
|
* Method and hash used for expected hash computation.
|
||||||
|
*
|
||||||
|
* References are not allowed by fiat.
|
||||||
*/
|
*/
|
||||||
FixedOutputHash hash;
|
ContentAddress ca;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Return the \ref StorePath "store path" corresponding to this output
|
* Return the \ref StorePath "store path" corresponding to this output
|
||||||
|
@ -48,7 +51,7 @@ struct DerivationOutputCAFixed
|
||||||
*/
|
*/
|
||||||
StorePath path(const Store & store, std::string_view drvName, std::string_view outputName) const;
|
StorePath path(const Store & store, std::string_view drvName, std::string_view outputName) const;
|
||||||
|
|
||||||
GENERATE_CMP(DerivationOutputCAFixed, me->hash);
|
GENERATE_CMP(DerivationOutputCAFixed, me->ca);
|
||||||
};
|
};
|
||||||
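// Sketch of the new field in use (assumed values, not from this commit): a
// fixed-output definition now stores a whole ContentAddress, so the method
// and hash are read back through `ca` rather than a bare FixedOutputHash.
DerivationOutputCAFixed dof {
    .ca = ContentAddress::fromParts(
        FileIngestionMethod::Recursive,
        hashString(htSHA256, "example")),
};
// dof.ca.printMethodAlgo() == "r:sha256"
// dof.path(store, "drv-name", "out") now goes through makeFixedOutputPathFromCA()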
|
|
||||||
/**
|
/**
|
||||||
|
@ -61,7 +64,7 @@ struct DerivationOutputCAFloating
|
||||||
/**
|
/**
|
||||||
* How the file system objects will be serialized for hashing
|
* How the file system objects will be serialized for hashing
|
||||||
*/
|
*/
|
||||||
FileIngestionMethod method;
|
ContentAddressMethod method;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* How the serialization will be hashed
|
* How the serialization will be hashed
|
||||||
|
@ -88,7 +91,7 @@ struct DerivationOutputImpure
|
||||||
/**
|
/**
|
||||||
* How the file system objects will be serialized for hashing
|
* How the file system objects will be serialized for hashing
|
||||||
*/
|
*/
|
||||||
FileIngestionMethod method;
|
ContentAddressMethod method;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* How the serialization will be hashed
|
* How the serialization will be hashed
|
||||||
|
@ -343,12 +346,14 @@ struct Derivation : BasicDerivation
|
||||||
Store & store,
|
Store & store,
|
||||||
const std::map<std::pair<StorePath, std::string>, StorePath> & inputDrvOutputs) const;
|
const std::map<std::pair<StorePath, std::string>, StorePath> & inputDrvOutputs) const;
|
||||||
|
|
||||||
/* Check that the derivation is valid and does not present any
|
/**
|
||||||
illegal states.
|
* Check that the derivation is valid and does not present any
|
||||||
|
* illegal states.
|
||||||
This is mainly a matter of checking the outputs, where our C++
|
*
|
||||||
representation supports all sorts of combinations we do not yet
|
* This is mainly a matter of checking the outputs, where our C++
|
||||||
allow. */
|
* representation supports all sorts of combinations we do not yet
|
||||||
|
* allow.
|
||||||
|
*/
|
||||||
void checkInvariants(Store & store, const StorePath & drvPath) const;
|
void checkInvariants(Store & store, const StorePath & drvPath) const;
|
||||||
|
|
||||||
Derivation() = default;
|
Derivation() = default;
|
||||||
|
@ -491,17 +496,6 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
|
||||||
*/
|
*/
|
||||||
std::string hashPlaceholder(const std::string_view outputName);
|
std::string hashPlaceholder(const std::string_view outputName);
|
||||||
|
|
||||||
/**
|
|
||||||
* This creates an opaque and almost certainly unique string
|
|
||||||
* deterministically from a derivation path and output name.
|
|
||||||
*
|
|
||||||
* It is used as a placeholder to allow derivations to refer to
|
|
||||||
* content-addressed paths whose content --- and thus the path
|
|
||||||
* themselves --- isn't yet known. This occurs when a derivation has a
|
|
||||||
* dependency which is a CA derivation.
|
|
||||||
*/
|
|
||||||
std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName);
|
|
||||||
|
|
||||||
extern const Hash impureOutputHash;
|
extern const Hash impureOutputHash;
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
39  src/libstore/downstream-placeholder.cc  (new file)
|
@ -0,0 +1,39 @@
|
||||||
|
#include "downstream-placeholder.hh"
|
||||||
|
#include "derivations.hh"
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
std::string DownstreamPlaceholder::render() const
|
||||||
|
{
|
||||||
|
return "/" + hash.to_string(Base32, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
DownstreamPlaceholder DownstreamPlaceholder::unknownCaOutput(
|
||||||
|
const StorePath & drvPath,
|
||||||
|
std::string_view outputName)
|
||||||
|
{
|
||||||
|
auto drvNameWithExtension = drvPath.name();
|
||||||
|
auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4);
|
||||||
|
auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName);
|
||||||
|
return DownstreamPlaceholder {
|
||||||
|
hashString(htSHA256, clearText)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation(
|
||||||
|
const DownstreamPlaceholder & placeholder,
|
||||||
|
std::string_view outputName,
|
||||||
|
const ExperimentalFeatureSettings & xpSettings)
|
||||||
|
{
|
||||||
|
xpSettings.require(Xp::DynamicDerivations);
|
||||||
|
auto compressed = compressHash(placeholder.hash, 20);
|
||||||
|
auto clearText = "nix-computed-output:"
|
||||||
|
+ compressed.to_string(Base32, false)
|
||||||
|
+ ":" + std::string { outputName };
|
||||||
|
return DownstreamPlaceholder {
|
||||||
|
hashString(htSHA256, clearText)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
75  src/libstore/downstream-placeholder.hh  (new file)
|
@ -0,0 +1,75 @@
|
||||||
|
#pragma once
|
||||||
|
///@file
|
||||||
|
|
||||||
|
#include "hash.hh"
|
||||||
|
#include "path.hh"
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Downstream Placeholders are opaque and almost certainly unique values
|
||||||
|
* used to allow derivations to refer to store objects which are yet to
|
||||||
|
* be built and for which we do not yet have store paths.
|
||||||
|
*
|
||||||
|
* They correspond to `DerivedPaths` that are not `DerivedPath::Opaque`,
|
||||||
|
* except for the cases involving input addressing or fixed outputs
|
||||||
|
* where we do know a store path for the derivation output in advance.
|
||||||
|
*
|
||||||
|
* Unlike `DerivationPath`, however, `DownstreamPlaceholder` is
|
||||||
|
* purposefully opaque and obfuscated. This is so they are hard to
|
||||||
|
* create by accident, and so substituting them (once we know what the
|
||||||
|
* path to the store object is) is unlikely to capture other stuff it
|
||||||
|
* shouldn't.
|
||||||
|
*
|
||||||
|
* We use them with `Derivation`: the `render()` method is called to
|
||||||
|
* render an opaque string which can be used in the derivation, and the
|
||||||
|
* resolving logic can substitute those strings for store paths when
|
||||||
|
* resolving `Derivation.inputDrvs` to `BasicDerivation.inputSrcs`.
|
||||||
|
*/
|
||||||
|
class DownstreamPlaceholder
|
||||||
|
{
|
||||||
|
/**
|
||||||
|
* `DownstreamPlaceholder` is just a newtype of `Hash`.
|
||||||
|
* This is its only field.
|
||||||
|
*/
|
||||||
|
Hash hash;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Newtype constructor
|
||||||
|
*/
|
||||||
|
DownstreamPlaceholder(Hash hash) : hash(hash) { }
|
||||||
|
|
||||||
|
public:
|
||||||
|
/**
|
||||||
|
* This creates an opaque and almost certainly unique string
|
||||||
|
* deterministically from the placeholder.
|
||||||
|
*/
|
||||||
|
std::string render() const;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a placeholder for an unknown output of a content-addressed
|
||||||
|
* derivation.
|
||||||
|
*
|
||||||
|
* The derivation itself is known (we have a store path for it), but
|
||||||
|
* the output doesn't yet have a known store path.
|
||||||
|
*/
|
||||||
|
static DownstreamPlaceholder unknownCaOutput(
|
||||||
|
const StorePath & drvPath,
|
||||||
|
std::string_view outputName);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a placeholder for the output of an unknown derivation.
|
||||||
|
*
|
||||||
|
* The derivation is not yet known because it is a dynamic
|
||||||
|
* derivation --- it is itself an output of another derivation ---
|
||||||
|
* and we just have (another) placeholder for it.
|
||||||
|
*
|
||||||
|
* @param xpSettings Stop-gap to avoid globals during unit tests.
|
||||||
|
*/
|
||||||
|
static DownstreamPlaceholder unknownDerivation(
|
||||||
|
const DownstreamPlaceholder & drvPlaceholder,
|
||||||
|
std::string_view outputName,
|
||||||
|
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||||
|
};
|
||||||
|
|
||||||
|
}
|
|
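// Usage sketch (assumed example values, not code from this commit): a
// derivation that consumes a not-yet-built CA output embeds the rendered
// placeholder string, and the resolving logic later rewrites that string to
// the real store path; a dynamic derivation chains a second placeholder off
// the first.
StorePath drvPath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv" };   // dummy hash part
auto ph = DownstreamPlaceholder::unknownCaOutput(drvPath, "out");
std::string rendered = ph.render();   // "/<base32 SHA-256>", substituted at resolve time
auto ph2 = DownstreamPlaceholder::unknownDerivation(ph, "dev");   // needs `dynamic-derivations`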
@ -159,6 +159,15 @@ public:
|
||||||
)",
|
)",
|
||||||
{"build-max-jobs"}};
|
{"build-max-jobs"}};
|
||||||
|
|
||||||
|
Setting<unsigned int> maxSubstitutionJobs{
|
||||||
|
this, 16, "max-substitution-jobs",
|
||||||
|
R"(
|
||||||
|
This option defines the maximum number of substitution jobs that Nix
|
||||||
|
will try to run in parallel. The default is `16`. The minimum value
|
||||||
|
one can choose is `1` and lower values will be interpreted as `1`.
|
||||||
|
)",
|
||||||
|
{"substitution-max-jobs"}};
|
||||||
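// Hypothetical consumer-side sketch (an assumption; this hunk only declares
// the setting): the scheduler would clamp values below 1 up to 1, per the
// description above.
unsigned int substitutionCap = std::max(1U, settings.maxSubstitutionJobs.get());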
|
|
||||||
Setting<unsigned int> buildCores{
|
Setting<unsigned int> buildCores{
|
||||||
this,
|
this,
|
||||||
getDefaultCores(),
|
getDefaultCores(),
|
||||||
|
@ -972,7 +981,7 @@ public:
|
||||||
this, false, "use-xdg-base-directories",
|
this, false, "use-xdg-base-directories",
|
||||||
R"(
|
R"(
|
||||||
If set to `true`, Nix will conform to the [XDG Base Directory Specification] for files in `$HOME`.
|
If set to `true`, Nix will conform to the [XDG Base Directory Specification] for files in `$HOME`.
|
||||||
The environment variables used to implement this are documented in the [Environment Variables section](@docroot@/installation/env-variables.md).
|
The environment variables used to implement this are documented in the [Environment Variables section](@docroot@/command-ref/env-common.md).
|
||||||
|
|
||||||
[XDG Base Directory Specification]: https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
|
[XDG Base Directory Specification]: https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
|
||||||
|
|
||||||
|
|
|
@ -344,6 +344,17 @@ public:
|
||||||
virtual ref<FSAccessor> getFSAccessor() override
|
virtual ref<FSAccessor> getFSAccessor() override
|
||||||
{ unsupported("getFSAccessor"); }
|
{ unsupported("getFSAccessor"); }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The default instance would schedule the work on the client side, but
|
||||||
|
* for consistency with `buildPaths` and `buildDerivation` it should happen
|
||||||
|
* on the remote side.
|
||||||
|
*
|
||||||
|
* We make this fail for now so we can implement this properly later
|
||||||
|
* without it being a breaking change.
|
||||||
|
*/
|
||||||
|
void repairPath(const StorePath & path) override
|
||||||
|
{ unsupported("repairPath"); }
|
||||||
|
|
||||||
void computeFSClosure(const StorePathSet & paths,
|
void computeFSClosure(const StorePathSet & paths,
|
||||||
StorePathSet & out, bool flipDirection = false,
|
StorePathSet & out, bool flipDirection = false,
|
||||||
bool includeOutputs = false, bool includeDerivers = false) override
|
bool includeOutputs = false, bool includeDerivers = false) override
|
||||||
|
|
|
@ -240,8 +240,6 @@ public:
|
||||||
|
|
||||||
void vacuumDB();
|
void vacuumDB();
|
||||||
|
|
||||||
void repairPath(const StorePath & path) override;
|
|
||||||
|
|
||||||
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
|
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
@ -57,12 +57,6 @@ $(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
|
||||||
|
|
||||||
$(d)/build.cc:
|
$(d)/build.cc:
|
||||||
|
|
||||||
%.gen.hh: %
|
|
||||||
@echo 'R"foo(' >> $@.tmp
|
|
||||||
$(trace-gen) cat $< >> $@.tmp
|
|
||||||
@echo ')foo"' >> $@.tmp
|
|
||||||
@mv $@.tmp $@
|
|
||||||
|
|
||||||
clean-files += $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
|
clean-files += $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
|
||||||
|
|
||||||
$(eval $(call install-file-in, $(d)/nix-store.pc, $(libdir)/pkgconfig, 0644))
|
$(eval $(call install-file-in, $(d)/nix-store.pc, $(libdir)/pkgconfig, 0644))
|
||||||
|
|
|
@ -83,14 +83,15 @@ void Store::computeFSClosure(const StorePath & startPath,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
std::optional<ContentAddress> getDerivationCA(const BasicDerivation & drv)
|
const ContentAddress * getDerivationCA(const BasicDerivation & drv)
|
||||||
{
|
{
|
||||||
auto out = drv.outputs.find("out");
|
auto out = drv.outputs.find("out");
|
||||||
if (out != drv.outputs.end()) {
|
if (out == drv.outputs.end())
|
||||||
if (const auto * v = std::get_if<DerivationOutput::CAFixed>(&out->second.raw()))
|
return nullptr;
|
||||||
return v->hash;
|
if (auto dof = std::get_if<DerivationOutput::CAFixed>(&out->second)) {
|
||||||
|
return &dof->ca;
|
||||||
}
|
}
|
||||||
return std::nullopt;
|
return nullptr;
|
||||||
}
|
}
|
||||||
|
|
||||||
void Store::queryMissing(const std::vector<DerivedPath> & targets,
|
void Store::queryMissing(const std::vector<DerivedPath> & targets,
|
||||||
|
@ -140,7 +141,13 @@ void Store::queryMissing(const std::vector<DerivedPath> & targets,
|
||||||
if (drvState_->lock()->done) return;
|
if (drvState_->lock()->done) return;
|
||||||
|
|
||||||
SubstitutablePathInfos infos;
|
SubstitutablePathInfos infos;
|
||||||
querySubstitutablePathInfos({{outPath, getDerivationCA(*drv)}}, infos);
|
auto * cap = getDerivationCA(*drv);
|
||||||
|
querySubstitutablePathInfos({
|
||||||
|
{
|
||||||
|
outPath,
|
||||||
|
cap ? std::optional { *cap } : std::nullopt,
|
||||||
|
},
|
||||||
|
}, infos);
|
||||||
|
|
||||||
if (infos.empty()) {
|
if (infos.empty()) {
|
||||||
drvState_->lock()->done = true;
|
drvState_->lock()->done = true;
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#include "path-info.hh"
|
#include "path-info.hh"
|
||||||
#include "worker-protocol.hh"
|
#include "worker-protocol.hh"
|
||||||
|
#include "store-api.hh"
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
|
|
@ -136,6 +136,19 @@ size_t Realisation::checkSignatures(const PublicKeys & publicKeys) const
|
||||||
return good;
|
return good;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
SingleDrvOutputs filterDrvOutputs(const OutputsSpec& wanted, SingleDrvOutputs&& outputs)
|
||||||
|
{
|
||||||
|
SingleDrvOutputs ret = std::move(outputs);
|
||||||
|
for (auto it = ret.begin(); it != ret.end(); ) {
|
||||||
|
if (!wanted.contains(it->first))
|
||||||
|
it = ret.erase(it);
|
||||||
|
else
|
||||||
|
++it;
|
||||||
|
}
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
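// Usage sketch (assumed example, not from this commit): keep only the
// realisations for the output names the caller asked for.
SingleDrvOutputs all;   // e.g. every output a completed build produced
auto onlyOut = filterDrvOutputs(OutputsSpec::Names { "out" }, std::move(all));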
|
|
||||||
StorePath RealisedPath::path() const {
|
StorePath RealisedPath::path() const {
|
||||||
return std::visit([](auto && arg) { return arg.getPath(); }, raw);
|
return std::visit([](auto && arg) { return arg.getPath(); }, raw);
|
||||||
}
|
}
|
||||||
|
|
|
@ -12,6 +12,7 @@
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
class Store;
|
class Store;
|
||||||
|
struct OutputsSpec;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A general `Realisation` key.
|
* A general `Realisation` key.
|
||||||
|
@ -93,6 +94,14 @@ typedef std::map<std::string, Realisation> SingleDrvOutputs;
|
||||||
*/
|
*/
|
||||||
typedef std::map<DrvOutput, Realisation> DrvOutputs;
|
typedef std::map<DrvOutput, Realisation> DrvOutputs;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Filter a SingleDrvOutputs to include only specific output names
|
||||||
|
*
|
||||||
|
* Moves the `outputs` input.
|
||||||
|
*/
|
||||||
|
SingleDrvOutputs filterDrvOutputs(const OutputsSpec&, SingleDrvOutputs&&);
|
||||||
|
|
||||||
|
|
||||||
struct OpaquePath {
|
struct OpaquePath {
|
||||||
StorePath path;
|
StorePath path;
|
||||||
|
|
||||||
|
|
|
@ -18,189 +18,6 @@
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
namespace worker_proto {
|
|
||||||
|
|
||||||
std::string read(const Store & store, Source & from, Phantom<std::string> _)
|
|
||||||
{
|
|
||||||
return readString(from);
|
|
||||||
}
|
|
||||||
|
|
||||||
void write(const Store & store, Sink & out, const std::string & str)
|
|
||||||
{
|
|
||||||
out << str;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
StorePath read(const Store & store, Source & from, Phantom<StorePath> _)
|
|
||||||
{
|
|
||||||
return store.parseStorePath(readString(from));
|
|
||||||
}
|
|
||||||
|
|
||||||
void write(const Store & store, Sink & out, const StorePath & storePath)
|
|
||||||
{
|
|
||||||
out << store.printStorePath(storePath);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
std::optional<TrustedFlag> read(const Store & store, Source & from, Phantom<std::optional<TrustedFlag>> _)
|
|
||||||
{
|
|
||||||
auto temp = readNum<uint8_t>(from);
|
|
||||||
switch (temp) {
|
|
||||||
case 0:
|
|
||||||
return std::nullopt;
|
|
||||||
case 1:
|
|
||||||
return { Trusted };
|
|
||||||
case 2:
|
|
||||||
return { NotTrusted };
|
|
||||||
default:
|
|
||||||
throw Error("Invalid trusted status from remote");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void write(const Store & store, Sink & out, const std::optional<TrustedFlag> & optTrusted)
|
|
||||||
{
|
|
||||||
if (!optTrusted)
|
|
||||||
out << (uint8_t)0;
|
|
||||||
else {
|
|
||||||
switch (*optTrusted) {
|
|
||||||
case Trusted:
|
|
||||||
out << (uint8_t)1;
|
|
||||||
break;
|
|
||||||
case NotTrusted:
|
|
||||||
out << (uint8_t)2;
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
assert(false);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
ContentAddress read(const Store & store, Source & from, Phantom<ContentAddress> _)
|
|
||||||
{
|
|
||||||
return ContentAddress::parse(readString(from));
|
|
||||||
}
|
|
||||||
|
|
||||||
void write(const Store & store, Sink & out, const ContentAddress & ca)
|
|
||||||
{
|
|
||||||
out << renderContentAddress(ca);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
DerivedPath read(const Store & store, Source & from, Phantom<DerivedPath> _)
|
|
||||||
{
|
|
||||||
auto s = readString(from);
|
|
||||||
return DerivedPath::parseLegacy(store, s);
|
|
||||||
}
|
|
||||||
|
|
||||||
void write(const Store & store, Sink & out, const DerivedPath & req)
|
|
||||||
{
|
|
||||||
out << req.to_string_legacy(store);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
Realisation read(const Store & store, Source & from, Phantom<Realisation> _)
|
|
||||||
{
|
|
||||||
std::string rawInput = readString(from);
|
|
||||||
return Realisation::fromJSON(
|
|
||||||
nlohmann::json::parse(rawInput),
|
|
||||||
"remote-protocol"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
void write(const Store & store, Sink & out, const Realisation & realisation)
|
|
||||||
{
|
|
||||||
out << realisation.toJSON().dump();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
DrvOutput read(const Store & store, Source & from, Phantom<DrvOutput> _)
|
|
||||||
{
|
|
||||||
return DrvOutput::parse(readString(from));
|
|
||||||
}
|
|
||||||
|
|
||||||
void write(const Store & store, Sink & out, const DrvOutput & drvOutput)
|
|
||||||
{
|
|
||||||
out << drvOutput.to_string();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
KeyedBuildResult read(const Store & store, Source & from, Phantom<KeyedBuildResult> _)
|
|
||||||
{
|
|
||||||
auto path = worker_proto::read(store, from, Phantom<DerivedPath> {});
|
|
||||||
auto br = worker_proto::read(store, from, Phantom<BuildResult> {});
|
|
||||||
return KeyedBuildResult {
|
|
||||||
std::move(br),
|
|
||||||
/* .path = */ std::move(path),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
void write(const Store & store, Sink & to, const KeyedBuildResult & res)
|
|
||||||
{
|
|
||||||
worker_proto::write(store, to, res.path);
|
|
||||||
worker_proto::write(store, to, static_cast<const BuildResult &>(res));
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
BuildResult read(const Store & store, Source & from, Phantom<BuildResult> _)
|
|
||||||
{
|
|
||||||
BuildResult res;
|
|
||||||
res.status = (BuildResult::Status) readInt(from);
|
|
||||||
from
|
|
||||||
>> res.errorMsg
|
|
||||||
>> res.timesBuilt
|
|
||||||
>> res.isNonDeterministic
|
|
||||||
>> res.startTime
|
|
||||||
>> res.stopTime;
|
|
||||||
auto builtOutputs = worker_proto::read(store, from, Phantom<DrvOutputs> {});
|
|
||||||
for (auto && [output, realisation] : builtOutputs)
|
|
||||||
res.builtOutputs.insert_or_assign(
|
|
||||||
std::move(output.outputName),
|
|
||||||
std::move(realisation));
|
|
||||||
return res;
|
|
||||||
}
|
|
||||||
|
|
||||||
void write(const Store & store, Sink & to, const BuildResult & res)
|
|
||||||
{
|
|
||||||
to
|
|
||||||
<< res.status
|
|
||||||
<< res.errorMsg
|
|
||||||
<< res.timesBuilt
|
|
||||||
<< res.isNonDeterministic
|
|
||||||
<< res.startTime
|
|
||||||
<< res.stopTime;
|
|
||||||
DrvOutputs builtOutputs;
|
|
||||||
for (auto & [output, realisation] : res.builtOutputs)
|
|
||||||
builtOutputs.insert_or_assign(realisation.id, realisation);
|
|
||||||
worker_proto::write(store, to, builtOutputs);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
std::optional<StorePath> read(const Store & store, Source & from, Phantom<std::optional<StorePath>> _)
|
|
||||||
{
|
|
||||||
auto s = readString(from);
|
|
||||||
return s == "" ? std::optional<StorePath> {} : store.parseStorePath(s);
|
|
||||||
}
|
|
||||||
|
|
||||||
void write(const Store & store, Sink & out, const std::optional<StorePath> & storePathOpt)
|
|
||||||
{
|
|
||||||
out << (storePathOpt ? store.printStorePath(*storePathOpt) : "");
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
std::optional<ContentAddress> read(const Store & store, Source & from, Phantom<std::optional<ContentAddress>> _)
|
|
||||||
{
|
|
||||||
return ContentAddress::parseOpt(readString(from));
|
|
||||||
}
|
|
||||||
|
|
||||||
void write(const Store & store, Sink & out, const std::optional<ContentAddress> & caOpt)
|
|
||||||
{
|
|
||||||
out << (caOpt ? renderContentAddress(*caOpt) : "");
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/* TODO: Separate these store impls into different files, give them better names */
|
/* TODO: Separate these store impls into different files, give them better names */
|
||||||
RemoteStore::RemoteStore(const Params & params)
|
RemoteStore::RemoteStore(const Params & params)
|
||||||
: RemoteStoreConfig(params)
|
: RemoteStoreConfig(params)
|
||||||
|
@ -597,6 +414,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
|
||||||
Source & dump,
|
Source & dump,
|
||||||
std::string_view name,
|
std::string_view name,
|
||||||
ContentAddressMethod caMethod,
|
ContentAddressMethod caMethod,
|
||||||
|
HashType hashType,
|
||||||
const StorePathSet & references,
|
const StorePathSet & references,
|
||||||
RepairFlag repair)
|
RepairFlag repair)
|
||||||
{
|
{
|
||||||
|
@ -608,7 +426,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
|
||||||
conn->to
|
conn->to
|
||||||
<< wopAddToStore
|
<< wopAddToStore
|
||||||
<< name
|
<< name
|
||||||
<< caMethod.render();
|
<< caMethod.render(hashType);
|
||||||
worker_proto::write(*this, conn->to, references);
|
worker_proto::write(*this, conn->to, references);
|
||||||
conn->to << repair;
|
conn->to << repair;
|
||||||
|
|
||||||
|
@ -628,26 +446,29 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
|
||||||
if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25");
|
if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25");
|
||||||
|
|
||||||
std::visit(overloaded {
|
std::visit(overloaded {
|
||||||
[&](const TextHashMethod & thm) -> void {
|
[&](const TextIngestionMethod & thm) -> void {
|
||||||
|
if (hashType != htSHA256)
|
||||||
|
throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
|
||||||
|
name, printHashType(hashType));
|
||||||
std::string s = dump.drain();
|
std::string s = dump.drain();
|
||||||
conn->to << wopAddTextToStore << name << s;
|
conn->to << wopAddTextToStore << name << s;
|
||||||
worker_proto::write(*this, conn->to, references);
|
worker_proto::write(*this, conn->to, references);
|
||||||
conn.processStderr();
|
conn.processStderr();
|
||||||
},
|
},
|
||||||
[&](const FixedOutputHashMethod & fohm) -> void {
|
[&](const FileIngestionMethod & fim) -> void {
|
||||||
conn->to
|
conn->to
|
||||||
<< wopAddToStore
|
<< wopAddToStore
|
||||||
<< name
|
<< name
|
||||||
<< ((fohm.hashType == htSHA256 && fohm.fileIngestionMethod == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
|
<< ((hashType == htSHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
|
||||||
<< (fohm.fileIngestionMethod == FileIngestionMethod::Recursive ? 1 : 0)
|
<< (fim == FileIngestionMethod::Recursive ? 1 : 0)
|
||||||
<< printHashType(fohm.hashType);
|
<< printHashType(hashType);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
conn->to.written = 0;
|
conn->to.written = 0;
|
||||||
connections->incCapacity();
|
connections->incCapacity();
|
||||||
{
|
{
|
||||||
Finally cleanup([&]() { connections->decCapacity(); });
|
Finally cleanup([&]() { connections->decCapacity(); });
|
||||||
if (fohm.fileIngestionMethod == FileIngestionMethod::Recursive) {
|
if (fim == FileIngestionMethod::Recursive) {
|
||||||
dump.drainInto(conn->to);
|
dump.drainInto(conn->to);
|
||||||
} else {
|
} else {
|
||||||
std::string contents = dump.drain();
|
std::string contents = dump.drain();
|
||||||
|
@ -678,7 +499,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
|
||||||
StorePath RemoteStore::addToStoreFromDump(Source & dump, std::string_view name,
|
StorePath RemoteStore::addToStoreFromDump(Source & dump, std::string_view name,
|
||||||
FileIngestionMethod method, HashType hashType, RepairFlag repair, const StorePathSet & references)
|
FileIngestionMethod method, HashType hashType, RepairFlag repair, const StorePathSet & references)
|
||||||
{
|
{
|
||||||
return addCAToStore(dump, name, FixedOutputHashMethod{ .fileIngestionMethod = method, .hashType = hashType }, references, repair)->path;
|
return addCAToStore(dump, name, method, hashType, references, repair)->path;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -778,7 +599,7 @@ StorePath RemoteStore::addTextToStore(
|
||||||
RepairFlag repair)
|
RepairFlag repair)
|
||||||
{
|
{
|
||||||
StringSource source(s);
|
StringSource source(s);
|
||||||
return addCAToStore(source, name, TextHashMethod{}, references, repair)->path;
|
return addCAToStore(source, name, TextIngestionMethod {}, htSHA256, references, repair)->path;
|
||||||
}
|
}
|
||||||
|
|
||||||
void RemoteStore::registerDrvOutput(const Realisation & info)
|
void RemoteStore::registerDrvOutput(const Realisation & info)
|
||||||
|
|
|
@ -78,6 +78,7 @@ public:
|
||||||
Source & dump,
|
Source & dump,
|
||||||
std::string_view name,
|
std::string_view name,
|
||||||
ContentAddressMethod caMethod,
|
ContentAddressMethod caMethod,
|
||||||
|
HashType hashType,
|
||||||
const StorePathSet & references,
|
const StorePathSet & references,
|
||||||
RepairFlag repair);
|
RepairFlag repair);
|
||||||
|
|
||||||
|
@ -136,6 +137,17 @@ public:
|
||||||
|
|
||||||
bool verifyStore(bool checkContents, RepairFlag repair) override;
|
bool verifyStore(bool checkContents, RepairFlag repair) override;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The default instance would schedule the work on the client side, but
|
||||||
|
* for consistency with `buildPaths` and `buildDerivation` it should happen
|
||||||
|
* on the remote side.
|
||||||
|
*
|
||||||
|
* We make this fail for now so we can implement this properly later
|
||||||
|
* without it being a breaking change.
|
||||||
|
*/
|
||||||
|
void repairPath(const StorePath & path) override
|
||||||
|
{ unsupported("repairPath"); }
|
||||||
|
|
||||||
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
|
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
|
||||||
|
|
||||||
void queryMissing(const std::vector<DerivedPath> & targets,
|
void queryMissing(const std::vector<DerivedPath> & targets,
|
||||||
|
|
|
@ -41,6 +41,11 @@ void SSHMaster::addCommonSSHOpts(Strings & args)
|
||||||
args.push_back("-oLocalCommand=echo started");
|
args.push_back("-oLocalCommand=echo started");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
bool SSHMaster::isMasterRunning() {
|
||||||
|
auto res = runProgram(RunOptions {.program = "ssh", .args = {"-O", "check", host}, .mergeStderrToStdout = true});
|
||||||
|
return res.first == 0;
|
||||||
|
}
|
||||||
|
|
||||||
std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string & command)
|
std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string & command)
|
||||||
{
|
{
|
||||||
Path socketPath = startMaster();
|
Path socketPath = startMaster();
|
||||||
|
@ -97,7 +102,7 @@ std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string
|
||||||
|
|
||||||
// Wait for the SSH connection to be established,
|
// Wait for the SSH connection to be established,
|
||||||
// So that we don't overwrite the password prompt with our progress bar.
|
// So that we don't overwrite the password prompt with our progress bar.
|
||||||
if (!fakeSSH && !useMaster) {
|
if (!fakeSSH && !useMaster && !isMasterRunning()) {
|
||||||
std::string reply;
|
std::string reply;
|
||||||
try {
|
try {
|
||||||
reply = readLine(out.readSide.get());
|
reply = readLine(out.readSide.get());
|
||||||
|
@ -133,6 +138,8 @@ Path SSHMaster::startMaster()
|
||||||
logger->pause();
|
logger->pause();
|
||||||
Finally cleanup = [&]() { logger->resume(); };
|
Finally cleanup = [&]() { logger->resume(); };
|
||||||
|
|
||||||
|
bool wasMasterRunning = isMasterRunning();
|
||||||
|
|
||||||
state->sshMaster = startProcess([&]() {
|
state->sshMaster = startProcess([&]() {
|
||||||
restoreProcessContext();
|
restoreProcessContext();
|
||||||
|
|
||||||
|
@@ -152,13 +159,15 @@ Path SSHMaster::startMaster()

     out.writeSide = -1;

-    std::string reply;
-    try {
-        reply = readLine(out.readSide.get());
-    } catch (EndOfFile & e) { }
+    if (!wasMasterRunning) {
+        std::string reply;
+        try {
+            reply = readLine(out.readSide.get());
+        } catch (EndOfFile & e) { }

-    if (reply != "started")
-        throw Error("failed to start SSH master connection to '%s'", host);
+        if (reply != "started")
+            throw Error("failed to start SSH master connection to '%s'", host);
+    }

     return state->socketPath;
 }
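Taken together, the `startCommand` and `startMaster` hunks above make the handshake conditional: the `started` line comes from `-oLocalCommand=echo started`, which only fires when a new master connection is actually established, so waiting for it while an existing master is being reused would block. A condensed sketch of that control flow follows; `spawnMasterProcess` and `readLineFromMaster` are hypothetical stubs, not the actual `SSHMaster` implementation.

// Condensed sketch of the control flow above; spawnMasterProcess and
// readLineFromMaster are hypothetical stubs, not the SSHMaster implementation.
#include <stdexcept>
#include <string>

bool isSshMasterRunning(const std::string & host);   // see the earlier sketch

static void spawnMasterProcess(const std::string &) { /* would start `ssh -M ...` */ }
static std::string readLineFromMaster() { return "started"; }   // stub

void startMasterSketch(const std::string & host)
{
    bool wasMasterRunning = isSshMasterRunning(host);

    spawnMasterProcess(host);   // reuses the existing master if there is one

    // Only a freshly started master runs `-oLocalCommand=echo started`, so
    // skip the handshake when a master was already up before we spawned.
    if (!wasMasterRunning) {
        std::string reply = readLineFromMaster();
        if (reply != "started")
            throw std::runtime_error("failed to start SSH master connection to '" + host + "'");
    }
}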
@@ -28,6 +28,7 @@ private:
     Sync<State> state_;

     void addCommonSSHOpts(Strings & args);
+    bool isMasterRunning();

 public:
@@ -679,8 +679,7 @@ public:
      * Repair the contents of the given path by redownloading it using
      * a substituter (if available).
      */
-    virtual void repairPath(const StorePath & path)
-    { unsupported("repairPath"); }
+    virtual void repairPath(const StorePath & path);

     /**
      * Add signatures to the specified store path. The signatures are
@@ -1022,7 +1021,7 @@ std::optional<ValidPathInfo> decodeValidPathInfo(
  */
 std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);

-std::optional<ContentAddress> getDerivationCA(const BasicDerivation & drv);
+const ContentAddress * getDerivationCA(const BasicDerivation & drv);

 std::map<DrvOutput, StorePath> drvOutputReferences(
     Store & store,
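The last declaration change above swaps a by-value `std::optional<ContentAddress>` return for a `const ContentAddress *`. A pointer can refer to data the derivation already owns, so callers keep the same "maybe absent" semantics (with `nullptr` meaning "none") without copying. A simplified standalone sketch of the two shapes, with stand-in types rather than Nix's, is:

// Standalone sketch of the API change above: returning a pointer into data
// the callee already owns avoids copying, with nullptr standing in for
// "no content address". Types here are simplified stand-ins.
#include <optional>
#include <string>

struct ContentAddress { std::string rendered; };

struct BasicDerivation {
    std::optional<ContentAddress> ca;   // owned by the derivation itself
};

// Old shape: copies the ContentAddress out (or an empty optional).
std::optional<ContentAddress> getDerivationCAByValue(const BasicDerivation & drv)
{
    return drv.ca;
}

// New shape: no copy; the lifetime is tied to `drv`, and nullptr means "none".
const ContentAddress * getDerivationCA(const BasicDerivation & drv)
{
    return drv.ca ? &*drv.ca : nullptr;
}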
@@ -26,6 +26,14 @@ class CaDerivationTest : public DerivationTest
     }
 };

+class DynDerivationTest : public DerivationTest
+{
+    void SetUp() override
+    {
+        mockXpSettings.set("experimental-features", "dynamic-derivations ca-derivations");
+    }
+};
+
 class ImpureDerivationTest : public DerivationTest
 {
     void SetUp() override
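The new `DynDerivationTest` fixture above enables the `dynamic-derivations` and `ca-derivations` experimental features for the tests that need them. The general googletest pattern of layering settings through fixture inheritance looks roughly like this; `Settings` and the fixture names below are placeholders rather than Nix's `mockXpSettings` harness:

// Generic googletest sketch of the fixture-inheritance pattern used above;
// Settings and the feature strings are placeholders, not Nix's mockXpSettings.
#include <gtest/gtest.h>
#include <set>
#include <string>

struct Settings { std::set<std::string> features; };

class DerivationTestSketch : public ::testing::Test {
protected:
    Settings settings;
};

class DynDerivationTestSketch : public DerivationTestSketch {
    void SetUp() override {
        // Tests in this fixture may rely on both features being enabled.
        settings.features.insert("dynamic-derivations");
        settings.features.insert("ca-derivations");
    }
};

TEST_F(DynDerivationTestSketch, featuresEnabled) {
    EXPECT_TRUE(settings.features.count("ca-derivations"));
}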
@@ -66,20 +74,47 @@ TEST_JSON(DerivationTest, inputAddressed,
     }),
     "drv-name", "output-name")

-TEST_JSON(DerivationTest, caFixed,
+TEST_JSON(DerivationTest, caFixedFlat,
+    R"({
+        "hashAlgo": "sha256",
+        "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f",
+        "path": "/nix/store/rhcg9h16sqvlbpsa6dqm57sbr2al6nzg-drv-name-output-name"
+    })",
+    (DerivationOutput::CAFixed {
+        .ca = FixedOutputHash {
+            .method = FileIngestionMethod::Flat,
+            .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="),
+        },
+    }),
+    "drv-name", "output-name")
+
+TEST_JSON(DerivationTest, caFixedNAR,
     R"({
         "hashAlgo": "r:sha256",
         "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f",
         "path": "/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-drv-name-output-name"
     })",
     (DerivationOutput::CAFixed {
-        .hash = {
+        .ca = FixedOutputHash {
             .method = FileIngestionMethod::Recursive,
             .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="),
         },
     }),
     "drv-name", "output-name")

+TEST_JSON(DynDerivationTest, caFixedText,
+    R"({
+        "hashAlgo": "text:sha256",
+        "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f",
+        "path": "/nix/store/6s1zwabh956jvhv4w9xcdb5jiyanyxg1-drv-name-output-name"
+    })",
+    (DerivationOutput::CAFixed {
+        .ca = TextHash {
+            .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="),
+        },
+    }),
+    "drv-name", "output-name")
+
 TEST_JSON(CaDerivationTest, caFloating,
     R"({
         "hashAlgo": "r:sha256"
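The test vectors above suggest how the JSON `hashAlgo` field encodes the ingestion method as a prefix on the algorithm name: no prefix for a flat file hash, `r:` for a recursive (NAR) hash, and `text:` for the text method exercised by the dynamic-derivations fixture. A simplified sketch of parsing that convention, not Nix's actual parser, is:

// Simplified sketch of the "hashAlgo" prefix convention visible in the test
// vectors above ("sha256", "r:sha256", "text:sha256"); not Nix's actual parser.
#include <string>
#include <string_view>
#include <utility>

enum class IngestionMethod { Flat, Recursive, Text };

std::pair<IngestionMethod, std::string> parseHashAlgo(std::string_view s)
{
    if (s.rfind("r:", 0) == 0)
        return {IngestionMethod::Recursive, std::string(s.substr(2))};   // NAR hash
    if (s.rfind("text:", 0) == 0)
        return {IngestionMethod::Text, std::string(s.substr(5))};        // text hashing
    return {IngestionMethod::Flat, std::string(s)};                      // plain file hash
}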
Some files were not shown because too many files have changed in this diff.