diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md
new file mode 100644
index 000000000..537aa0909
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md
@@ -0,0 +1,7 @@
+**Release Notes**
+Please include relevant [release notes](https://github.com/NixOS/nix/blob/master/doc/manual/src/release-notes/rl-next.md) as needed.
+
+
+**Testing**
+
+If this issue is a regression or something that should block release, please consider including a test either in the [testsuite](https://github.com/NixOS/nix/tree/master/tests) or as a [hydraJob]( https://github.com/NixOS/nix/blob/master/flake.nix#L396) so that it can be part of the [automatic checks](https://hydra.nixos.org/jobset/nix/master).
diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml
new file mode 100644
index 000000000..ec7ab4516
--- /dev/null
+++ b/.github/workflows/backport.yml
@@ -0,0 +1,26 @@
+name: Backport
+on:
+ pull_request_target:
+ types: [closed, labeled]
+jobs:
+ backport:
+ name: Backport Pull Request
+ if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name))
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ with:
+ ref: ${{ github.event.pull_request.head.sha }}
+ # required to find all branches
+ fetch-depth: 0
+ - name: Create backport PRs
+ # should be kept in sync with `version`
+ uses: zeebe-io/backport-action@v0.0.7
+ with:
+ # Config README: https://github.com/zeebe-io/backport-action#backport-action
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ github_workspace: ${{ github.workspace }}
+ pull_description: |-
+ Bot-based backport to `${target_branch}`, triggered by a label in #${pull_number}.
+ # should be kept in sync with `uses`
+ version: v0.0.7
diff --git a/.github/workflows/test.yml b/.github/workflows/ci.yml
similarity index 56%
rename from .github/workflows/test.yml
rename to .github/workflows/ci.yml
index abaff75ee..09436b7e3 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/ci.yml
@@ -1,20 +1,23 @@
-name: "Test"
+name: "CI"
+
on:
pull_request:
push:
+
jobs:
+
tests:
needs: [check_cachix]
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.os }}
-
+ timeout-minutes: 60
steps:
- - uses: actions/checkout@v2.3.4
+ - uses: actions/checkout@v2.4.0
with:
fetch-depth: 0
- - uses: cachix/install-nix-action@v14
+ - uses: cachix/install-nix-action@v16
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- uses: cachix/cachix-action@v10
if: needs.check_cachix.outputs.secret == 'true'
@@ -22,7 +25,8 @@ jobs:
name: '${{ env.CACHIX_NAME }}'
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
- - run: nix-build -A checks.$(nix-instantiate --eval -E '(builtins.currentSystem)')
+ - run: nix --experimental-features 'nix-command flakes' flake check -L
+
check_cachix:
name: Cachix secret present for installer tests
runs-on: ubuntu-latest
@@ -34,6 +38,7 @@ jobs:
env:
_CACHIX_SECRETS: ${{ secrets.CACHIX_SIGNING_KEY }}${{ secrets.CACHIX_AUTH_TOKEN }}
run: echo "::set-output name=secret::${{ env._CACHIX_SECRETS != '' }}"
+
installer:
needs: [tests, check_cachix]
if: github.event_name == 'push' && needs.check_cachix.outputs.secret == 'true'
@@ -41,11 +46,11 @@ jobs:
outputs:
installerURL: ${{ steps.prepare-installer.outputs.installerURL }}
steps:
- - uses: actions/checkout@v2.3.4
+ - uses: actions/checkout@v2.4.0
with:
fetch-depth: 0
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- - uses: cachix/install-nix-action@v14
+ - uses: cachix/install-nix-action@v16
- uses: cachix/cachix-action@v10
with:
name: '${{ env.CACHIX_NAME }}'
@@ -53,6 +58,7 @@ jobs:
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
- id: prepare-installer
run: scripts/prepare-installer-for-github-actions
+
installer_test:
needs: [installer, check_cachix]
if: github.event_name == 'push' && needs.check_cachix.outputs.secret == 'true'
@@ -61,10 +67,42 @@ jobs:
os: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.os }}
steps:
- - uses: actions/checkout@v2.3.4
+ - uses: actions/checkout@v2.4.0
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- - uses: cachix/install-nix-action@v14
+ - uses: cachix/install-nix-action@v16
with:
install_url: '${{needs.installer.outputs.installerURL}}'
install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
- run: nix-instantiate -E 'builtins.currentTime' --eval
+
+ docker_push_image:
+ needs: [check_cachix, tests]
+ if: >-
+ github.event_name == 'push' &&
+ github.ref_name == 'master' &&
+ needs.check_cachix.outputs.secret == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2.4.0
+ with:
+ fetch-depth: 0
+ - uses: cachix/install-nix-action@v16
+ - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
+ - run: echo NIX_VERSION="$(nix-instantiate --eval -E '(import ./default.nix).defaultPackage.${builtins.currentSystem}.version' | tr -d \")" >> $GITHUB_ENV
+ - uses: cachix/cachix-action@v10
+ if: needs.check_cachix.outputs.secret == 'true'
+ with:
+ name: '${{ env.CACHIX_NAME }}'
+ signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
+ authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
+ - run: nix --experimental-features 'nix-command flakes' build .#dockerImage -L
+ - run: docker load -i ./result/image.tar.gz
+ - run: docker tag nix:$NIX_VERSION nixos/nix:$NIX_VERSION
+ - run: docker tag nix:$NIX_VERSION nixos/nix:master
+ - name: Login to Docker Hub
+ uses: docker/login-action@v1
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - run: docker push nixos/nix:$NIX_VERSION
+ - run: docker push nixos/nix:master
diff --git a/.github/workflows/hydra_status.yml b/.github/workflows/hydra_status.yml
new file mode 100644
index 000000000..b97076bd7
--- /dev/null
+++ b/.github/workflows/hydra_status.yml
@@ -0,0 +1,16 @@
+name: Hydra status
+on:
+ schedule:
+ - cron: "12,42 * * * *"
+ workflow_dispatch:
+jobs:
+ check_hydra_status:
+ name: Check Hydra status
+ if: github.repository_owner == 'NixOS'
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2.4.0
+ with:
+ fetch-depth: 0
+ - run: bash scripts/check-hydra-status.sh
+
diff --git a/.gitignore b/.gitignore
index 86ffe9304..4b290425a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -26,8 +26,6 @@ perl/Makefile.config
# /scripts/
/scripts/nix-profile.sh
-/scripts/nix-reduce-build
-/scripts/nix-http-export.cgi
/scripts/nix-profile-daemon.sh
# /src/libexpr/
@@ -40,6 +38,7 @@ perl/Makefile.config
# /src/libstore/
*.gen.*
+/src/libstore/tests/libstore-tests
# /src/libutil/
/src/libutil/tests/libutil-tests
@@ -121,3 +120,7 @@ GTAGS
compile_commands.json
nix-rust/target
+
+result
+
+.vscode/
diff --git a/.version b/.version
index 7208c2182..9aa34646d 100644
--- a/.version
+++ b/.version
@@ -1 +1 @@
-2.4
\ No newline at end of file
+2.7.0
\ No newline at end of file
diff --git a/Makefile b/Makefile
index c7d8967c8..5040d2884 100644
--- a/Makefile
+++ b/Makefile
@@ -4,6 +4,7 @@ makefiles = \
src/libutil/local.mk \
src/libutil/tests/local.mk \
src/libstore/local.mk \
+ src/libstore/tests/local.mk \
src/libfetchers/local.mk \
src/libmain/local.mk \
src/libexpr/local.mk \
diff --git a/Makefile.config.in b/Makefile.config.in
index c8c4446b4..3505f337e 100644
--- a/Makefile.config.in
+++ b/Makefile.config.in
@@ -16,6 +16,7 @@ LDFLAGS = @LDFLAGS@
LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
LIBCURL_LIBS = @LIBCURL_LIBS@
+LOWDOWN_LIBS = @LOWDOWN_LIBS@
OPENSSL_LIBS = @OPENSSL_LIBS@
LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
PACKAGE_NAME = @PACKAGE_NAME@
diff --git a/boehmgc-coroutine-sp-fallback.diff b/boehmgc-coroutine-sp-fallback.diff
index fa8dd0325..e659bf470 100644
--- a/boehmgc-coroutine-sp-fallback.diff
+++ b/boehmgc-coroutine-sp-fallback.diff
@@ -1,8 +1,8 @@
diff --git a/pthread_stop_world.c b/pthread_stop_world.c
-index 1cee6a0b..46c3acd9 100644
+index 4b2c429..1fb4c52 100644
--- a/pthread_stop_world.c
+++ b/pthread_stop_world.c
-@@ -674,6 +674,8 @@ GC_INNER void GC_push_all_stacks(void)
+@@ -673,6 +673,8 @@ GC_INNER void GC_push_all_stacks(void)
struct GC_traced_stack_sect_s *traced_stack_sect;
pthread_t self = pthread_self();
word total_size = 0;
@@ -11,7 +11,7 @@ index 1cee6a0b..46c3acd9 100644
if (!EXPECT(GC_thr_initialized, TRUE))
GC_thr_init();
-@@ -723,6 +725,28 @@ GC_INNER void GC_push_all_stacks(void)
+@@ -722,6 +724,31 @@ GC_INNER void GC_push_all_stacks(void)
hi = p->altstack + p->altstack_size;
/* FIXME: Need to scan the normal stack too, but how ? */
/* FIXME: Assume stack grows down */
@@ -22,6 +22,9 @@ index 1cee6a0b..46c3acd9 100644
+ if (pthread_attr_getstacksize(&pattr, &stack_limit)) {
+ ABORT("GC_push_all_stacks: pthread_attr_getstacksize failed!");
+ }
++ if (pthread_attr_destroy(&pattr)) {
++ ABORT("GC_push_all_stacks: pthread_attr_destroy failed!");
++ }
+ // When a thread goes into a coroutine, we lose its original sp until
+ // control flow returns to the thread.
+ // While in the coroutine, the sp points outside the thread stack,
diff --git a/configure.ac b/configure.ac
index 65478ecc5..8a01c33ec 100644
--- a/configure.ac
+++ b/configure.ac
@@ -188,17 +188,24 @@ PKG_CHECK_MODULES([EDITLINE], [libeditline], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLA
[AC_MSG_ERROR([Nix requires libeditline; it was not found via pkg-config, but via its header, but required functions do not work. Maybe it is too old? >= 1.14 is required.])])
])
-# Look for libsodium, an optional dependency.
+# Look for libsodium.
PKG_CHECK_MODULES([SODIUM], [libsodium], [CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"])
# Look for libbrotli{enc,dec}.
PKG_CHECK_MODULES([LIBBROTLI], [libbrotlienc libbrotlidec], [CXXFLAGS="$LIBBROTLI_CFLAGS $CXXFLAGS"])
# Look for libcpuid.
+have_libcpuid=
if test "$machine_name" = "x86_64"; then
- PKG_CHECK_MODULES([LIBCPUID], [libcpuid], [CXXFLAGS="$LIBCPUID_CFLAGS $CXXFLAGS"])
- have_libcpuid=1
- AC_DEFINE([HAVE_LIBCPUID], [1], [Use libcpuid])
+ AC_ARG_ENABLE([cpuid],
+ AS_HELP_STRING([--disable-cpuid], [Do not determine microarchitecture levels with libcpuid (relevant to x86_64 only)]))
+ if test "x$enable_cpuid" != "xno"; then
+ PKG_CHECK_MODULES([LIBCPUID], [libcpuid],
+ [CXXFLAGS="$LIBCPUID_CFLAGS $CXXFLAGS"
+ have_libcpuid=1
+ AC_DEFINE([HAVE_LIBCPUID], [1], [Use libcpuid])]
+ )
+ fi
fi
AC_SUBST(HAVE_LIBCPUID, [$have_libcpuid])
@@ -255,13 +262,17 @@ fi
PKG_CHECK_MODULES([GTEST], [gtest_main])
+# Look for nlohmann/json.
+PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])
+
+
# documentation generation switch
AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]),
doc_generate=$enableval, doc_generate=yes)
AC_SUBST(doc_generate)
# Look for lowdown library.
-PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.8.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])
+PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])
# Setuid installations.
AC_CHECK_FUNCS([setresuid setreuid lchown])
diff --git a/default.nix b/default.nix
index 71d1a80ad..00ec5b617 100644
--- a/default.nix
+++ b/default.nix
@@ -1,3 +1,3 @@
-(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
+(import (fetchTarball "https://github.com/edolstra/flake-compat/archive/master.tar.gz") {
src = ./.;
}).defaultNix
diff --git a/doc/manual/generate-builtins.nix b/doc/manual/generate-builtins.nix
index 92c7b1a31..6c8b88da2 100644
--- a/doc/manual/generate-builtins.nix
+++ b/doc/manual/generate-builtins.nix
@@ -6,9 +6,9 @@ builtins:
concatStrings (map
(name:
let builtin = builtins.${name}; in
- "
${name} "
+ "${name} "
+ concatStringsSep " " (map (s: "${s}") builtin.args)
- + "
"
+ + "
"
+ "\n\n"
+ builtin.doc
+ "\n\n"
diff --git a/doc/manual/generate-manpage.nix b/doc/manual/generate-manpage.nix
index 4fc9abea1..244cfa0c2 100644
--- a/doc/manual/generate-manpage.nix
+++ b/doc/manual/generate-manpage.nix
@@ -1,4 +1,4 @@
-command:
+{ command, renderLinks ? false }:
with builtins;
with import ./utils.nix;
@@ -20,7 +20,11 @@ let
categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues def.commands)));
listCommands = cmds:
concatStrings (map (name:
- "* [`${command} ${name}`](./${appendName filename name}.md) - ${cmds.${name}.description}\n")
+ "* "
+ + (if renderLinks
+ then "[`${command} ${name}`](./${appendName filename name}.md)"
+ else "`${command} ${name}`")
+ + " - ${cmds.${name}.description}\n")
(attrNames cmds));
in
"where *subcommand* is one of the following:\n\n"
diff --git a/doc/manual/generate-options.nix b/doc/manual/generate-options.nix
index 3c31a4eec..84d90beb6 100644
--- a/doc/manual/generate-options.nix
+++ b/doc/manual/generate-options.nix
@@ -8,17 +8,19 @@ concatStrings (map
let option = options.${name}; in
" - `${name}` \n\n"
+ concatStrings (map (s: " ${s}\n") (splitLines option.description)) + "\n\n"
- + " **Default:** " + (
- if option.value == "" || option.value == []
- then "*empty*"
- else if isBool option.value
- then (if option.value then "`true`" else "`false`")
- else
- # n.b. a StringMap value type is specified as a string, but
- # this shows the value type. The empty stringmap is "null" in
- # JSON, but that converts to "{ }" here.
- (if isAttrs option.value then "`\"\"`"
- else "`" + toString option.value + "`")) + "\n\n"
+ + (if option.documentDefault
+ then " **Default:** " + (
+ if option.value == "" || option.value == []
+ then "*empty*"
+ else if isBool option.value
+ then (if option.value then "`true`" else "`false`")
+ else
+ # n.b. a StringMap value type is specified as a string, but
+ # this shows the value type. The empty stringmap is "null" in
+ # JSON, but that converts to "{ }" here.
+ (if isAttrs option.value then "`\"\"`"
+ else "`" + toString option.value + "`")) + "\n\n"
+ else " **Default:** *machine-specific*\n")
+ (if option.aliases != []
then " **Deprecated alias:** " + (concatStringsSep ", " (map (s: "`${s}`") option.aliases)) + "\n\n"
else "")
diff --git a/doc/manual/local.mk b/doc/manual/local.mk
index 5e61b2671..c1ce8aaeb 100644
--- a/doc/manual/local.mk
+++ b/doc/manual/local.mk
@@ -12,11 +12,13 @@ man-pages := $(foreach n, \
clean-files += $(d)/*.1 $(d)/*.5 $(d)/*.8
# Provide a dummy environment for nix, so that it will not access files outside the macOS sandbox.
+# Set cores to 0 because otherwise nix show-config resolves the cores based on the current machine
dummy-env = env -i \
HOME=/dummy \
NIX_CONF_DIR=/dummy \
NIX_SSL_CERT_FILE=/dummy/no-ca-bundle.crt \
- NIX_STATE_DIR=/dummy
+ NIX_STATE_DIR=/dummy \
+ NIX_CONFIG='cores = 0'
nix-eval = $(dummy-env) $(bindir)/nix eval --experimental-features nix-command -I nix/corepkgs=corepkgs --store dummy:// --impure --raw
@@ -44,7 +46,7 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli
$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix
@rm -rf $@
- $(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix (builtins.readFile $<)'
+ $(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { command = builtins.readFile $<; renderLinks = true; }'
$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix
@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
@@ -70,6 +72,7 @@ $(d)/builtins.json: $(bindir)/nix
@mv $@.tmp $@
# Generate the HTML manual.
+html: $(docdir)/manual/index.html
install: $(docdir)/manual/index.html
# Generate 'nix' manpages.
@@ -92,7 +95,7 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
lowdown -sT man -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \
rm $$tmpFile; \
done
- touch $@
+ @touch $@
$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/expressions/builtins.md $(call rwildcard, $(d)/src, *.md)
$(trace-gen) RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual
diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in
index df9209c7d..4e2afa20e 100644
--- a/doc/manual/src/SUMMARY.md.in
+++ b/doc/manual/src/SUMMARY.md.in
@@ -9,6 +9,7 @@
- [Prerequisites](installation/prerequisites-source.md)
- [Obtaining a Source Distribution](installation/obtaining-source.md)
- [Building Nix from Source](installation/building-source.md)
+ - [Using Nix within Docker](installation/installing-docker.md)
- [Security](installation/nix-security.md)
- [Single-User Mode](installation/single-user.md)
- [Multi-User Mode](installation/multi-user.md)
@@ -70,7 +71,10 @@
- [Hacking](contributing/hacking.md)
- [CLI guideline](contributing/cli-guideline.md)
- [Release Notes](release-notes/release-notes.md)
- - [Release 2.4 (2021-XX-XX)](release-notes/rl-2.4.md)
+ - [Release X.Y (202?-??-??)](release-notes/rl-next.md)
+ - [Release 2.6 (2022-01-24)](release-notes/rl-2.6.md)
+ - [Release 2.5 (2021-12-13)](release-notes/rl-2.5.md)
+ - [Release 2.4 (2021-11-01)](release-notes/rl-2.4.md)
- [Release 2.3 (2019-09-04)](release-notes/rl-2.3.md)
- [Release 2.2 (2019-01-11)](release-notes/rl-2.2.md)
- [Release 2.1 (2018-09-02)](release-notes/rl-2.1.md)
diff --git a/doc/manual/src/advanced-topics/distributed-builds.md b/doc/manual/src/advanced-topics/distributed-builds.md
index 580b36736..c4c60db15 100644
--- a/doc/manual/src/advanced-topics/distributed-builds.md
+++ b/doc/manual/src/advanced-topics/distributed-builds.md
@@ -53,8 +53,8 @@ example, the following command allows you to build a derivation for
$ uname
Linux
-$ nix build \
-    '(with import <nixpkgs> { system = "x86_64-darwin"; }; runCommand "foo" {} "uname > $out")' \
+$ nix build --impure \
+    --expr '(with import <nixpkgs> { system = "x86_64-darwin"; }; runCommand "foo" {} "uname > $out")' \
--builders 'ssh://mac x86_64-darwin'
[1/0/1 built, 0.0 MiB DL] building foo on ssh://mac
diff --git a/doc/manual/src/command-ref/conf-file-prefix.md b/doc/manual/src/command-ref/conf-file-prefix.md
index 3140170ab..44b7ba86d 100644
--- a/doc/manual/src/command-ref/conf-file-prefix.md
+++ b/doc/manual/src/command-ref/conf-file-prefix.md
@@ -16,8 +16,9 @@ By default Nix reads settings from the following places:
will be loaded in reverse order.
Otherwise it will look for `nix/nix.conf` files in `XDG_CONFIG_DIRS`
- and `XDG_CONFIG_HOME`. If these are unset, it will look in
- `$HOME/.config/nix.conf`.
+ and `XDG_CONFIG_HOME`. If unset, `XDG_CONFIG_DIRS` defaults to
+ `/etc/xdg`, and `XDG_CONFIG_HOME` defaults to `$HOME/.config`
+ as per [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html).
- If `NIX_CONFIG` is set, its contents is treated as the contents of
a configuration file.
diff --git a/doc/manual/src/command-ref/nix-env.md b/doc/manual/src/command-ref/nix-env.md
index 9138fa05a..8d6abaf52 100644
--- a/doc/manual/src/command-ref/nix-env.md
+++ b/doc/manual/src/command-ref/nix-env.md
@@ -238,7 +238,16 @@ a number of possible ways:
## Examples
-To install a specific version of `gcc` from the active Nix expression:
+To install a package using a specific attribute path from the active Nix expression:
+
+```console
+$ nix-env -iA gcc40mips
+installing `gcc-4.0.2'
+$ nix-env -iA xorg.xorgserver
+installing `xorg-server-1.2.0'
+```
+
+To install a specific version of `gcc` using the derivation name:
```console
$ nix-env --install gcc-3.3.2
@@ -246,6 +255,9 @@ installing `gcc-3.3.2'
uninstalling `gcc-3.1'
```
+Using the attribute path to select a package is preferred,
+as it is much faster and there will not be multiple matches.
+
Note the previously installed version is removed, since
`--preserve-installed` was not specified.
@@ -256,13 +268,6 @@ $ nix-env --install gcc
installing `gcc-3.3.2'
```
-To install using a specific attribute:
-
-```console
-$ nix-env -i -A gcc40mips
-$ nix-env -i -A xorg.xorgserver
-```
-
To install all derivations in the Nix expression `foo.nix`:
```console
@@ -374,22 +379,29 @@ For the other flags, see `--install`.
## Examples
```console
-$ nix-env --upgrade gcc
+$ nix-env --upgrade -A nixpkgs.gcc
upgrading `gcc-3.3.1' to `gcc-3.4'
```
+When there are no updates available, nothing will happen:
+
```console
-$ nix-env -u gcc-3.3.2 --always (switch to a specific version)
+$ nix-env --upgrade -A nixpkgs.pan
+```
+
+Using `-A` is preferred when possible, as it is faster and unambiguous, but
+it is also possible to upgrade to a specific version by matching the derivation name:
+
+```console
+$ nix-env -u gcc-3.3.2 --always
upgrading `gcc-3.4' to `gcc-3.3.2'
```
-```console
-$ nix-env --upgrade pan
-(no upgrades available, so nothing happens)
-```
+To try to upgrade everything
+(matching packages based on the part of the derivation name without version):
```console
-$ nix-env -u (try to upgrade everything)
+$ nix-env -u
upgrading `hello-2.1.2' to `hello-2.1.3'
upgrading `mozilla-1.2' to `mozilla-1.4'
```
@@ -401,7 +413,7 @@ of a derivation `x` by looking at their respective `name` attributes.
The names (e.g., `gcc-3.3.1` are split into two parts: the package name
(`gcc`), and the version (`3.3.1`). The version part starts after the
first dash not followed by a letter. `x` is considered an upgrade of `y`
-if their package names match, and the version of `y` is higher that that
+if their package names match, and the version of `y` is higher than that
of `x`.
The versions are compared by splitting them into contiguous components
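The splitting and comparison rules described here are also exposed to Nix expressions as `builtins.parseDrvName` and `builtins.compareVersions`, which is a convenient way to check how a given name will be interpreted; a small sketch:

```nix
# `nix-instantiate --eval --strict` on this expression prints `true`.
let
  parsed = builtins.parseDrvName "gcc-3.3.1";
  # parsed is { name = "gcc"; version = "3.3.1"; }: the version starts after
  # the first dash not followed by a letter, as described above.
in
  parsed.name == "gcc"
  && parsed.version == "3.3.1"
  # compareVersions returns -1, 0 or 1; "3.4" is considered newer than "3.3.1".
  && builtins.compareVersions "3.4" "3.3.1" == 1
```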
diff --git a/doc/manual/src/command-ref/nix-shell.md b/doc/manual/src/command-ref/nix-shell.md
index 72f6730f1..a2b6d8a8e 100644
--- a/doc/manual/src/command-ref/nix-shell.md
+++ b/doc/manual/src/command-ref/nix-shell.md
@@ -11,8 +11,8 @@
[`--command` *cmd*]
[`--run` *cmd*]
[`--exclude` *regexp*]
- [--pure]
- [--keep *name*]
+ [`--pure`]
+ [`--keep` *name*]
{{`--packages` | `-p`} {*packages* | *expressions*} … | [*path*]}
# Description
@@ -101,7 +101,8 @@ The following common options are supported:
- `NIX_BUILD_SHELL`\
Shell used to start the interactive environment. Defaults to the
- `bash` found in `PATH`.
+ `bash` found in `<nixpkgs>`, falling back to the `bash` found in
+ `PATH` if not found.
# Examples
@@ -110,13 +111,19 @@ shell in which to build it:
```console
$ nix-shell '<nixpkgs>' -A pan
-[nix-shell]$ unpackPhase
+[nix-shell]$ eval ${unpackPhase:-unpackPhase}
[nix-shell]$ cd pan-*
-[nix-shell]$ configurePhase
-[nix-shell]$ buildPhase
+[nix-shell]$ eval ${configurePhase:-configurePhase}
+[nix-shell]$ eval ${buildPhase:-buildPhase}
[nix-shell]$ ./pan/gui/pan
```
+The reason we use the form `eval ${configurePhase:-configurePhase}` here is that
+packages which override these phases do so by exporting the overridden
+values in an environment variable of the same name.
+Here bash is told to evaluate the contents of `configurePhase` if it exists as
+a variable, and otherwise to run the `configurePhase` shell function.
+
To clear the environment first, and do some additional automatic
initialisation of the interactive shell:
diff --git a/doc/manual/src/command-ref/nix-store.md b/doc/manual/src/command-ref/nix-store.md
index 7a131dc02..7db9f0c1c 100644
--- a/doc/manual/src/command-ref/nix-store.md
+++ b/doc/manual/src/command-ref/nix-store.md
@@ -125,7 +125,7 @@ Special exit codes:
- `104`\
Not deterministic, the build succeeded in check mode but the
- resulting output is not binary reproducable.
+ resulting output is not binary reproducible.
With the `--keep-going` flag it's possible for multiple failures to
occur, in this case the 1xx status codes are or combined using binary
@@ -321,8 +321,8 @@ symlink.
This query has one option:
- `--include-outputs`
- Also include the output path of store derivations, and their
- closures.
+ Also include the existing output paths of store derivations,
+ and their closures.
This query can be used to implement various kinds of deployment. A
*source deployment* is obtained by distributing the closure of a
diff --git a/doc/manual/src/command-ref/opt-common.md b/doc/manual/src/command-ref/opt-common.md
index 47862bc09..7ee1a26bc 100644
--- a/doc/manual/src/command-ref/opt-common.md
+++ b/doc/manual/src/command-ref/opt-common.md
@@ -162,11 +162,11 @@ Most Nix commands accept the following command-line options:
}: ...
```
- So if you call this Nix expression (e.g., when you do `nix-env -i
+ So if you call this Nix expression (e.g., when you do `nix-env -iA
pkgname`), the function will be called automatically using the
value [`builtins.currentSystem`](../expressions/builtins.md) for
the `system` argument. You can override this using `--arg`, e.g.,
- `nix-env -i pkgname --arg system \"i686-freebsd\"`. (Note that
+ `nix-env -iA pkgname --arg system \"i686-freebsd\"`. (Note that
since the argument is a Nix string literal, you have to escape the
quotes.)
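As a purely hypothetical illustration of the auto-called expression described above (file name and attribute made up):

```nix
# Hypothetical ./pkgs.nix used only to illustrate the auto-called `system`
# argument; it defaults to the current platform unless overridden with --arg.
{ system ? builtins.currentSystem }:

{
  # expose the chosen value so it is easy to inspect
  selectedSystem = system;
}
```

Evaluating `nix-instantiate --eval ./pkgs.nix -A selectedSystem` prints the current platform, and adding `--arg system \"i686-freebsd\"` overrides it, with the same quote escaping as in the `nix-env` invocation above.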
diff --git a/doc/manual/src/contributing/cli-guideline.md b/doc/manual/src/contributing/cli-guideline.md
index 0132867c8..01a1b1e73 100644
--- a/doc/manual/src/contributing/cli-guideline.md
+++ b/doc/manual/src/contributing/cli-guideline.md
@@ -3,7 +3,7 @@
## Goals
Purpose of this document is to provide a clear direction to **help design
-delightful command line** experience. This document contain guidelines to
+delightful command line** experience. This document contains guidelines to
follow to ensure a consistent and approachable user experience.
## Overview
@@ -103,7 +103,7 @@ impacted the most by bad user experience.
# Help is essential
Help should be built into your command line so that new users can gradually
-discover new features when they need them.
+discover new features when they need them.
## Looking for help
@@ -115,7 +115,7 @@ The rules are:
- Help is shown by using `--help` or `help` command (eg `nix` `--``help` or
`nix help`).
-- For non-COMMANDs (eg. `nix` `--``help` and `nix store` `--``help`) we **show
+- For non-COMMANDs (eg. `nix` `--``help` and `nix store` `--``help`) we **show
a summary** of most common use cases. Summary is presented on the STDOUT
without any use of PAGER.
- For COMMANDs (eg. `nix init` `--``help` or `nix help init`) we display the
@@ -176,7 +176,7 @@ $ nix init --template=template#pyton
------------------------------------------------------------------------
Initializing Nix project at `/path/to/here`.
Select a template for you new project:
- |> template#pyton
+ |> template#python
template#python-pip
template#python-poetry
```
@@ -230,17 +230,17 @@ Now **Learn** part of the output is where you educate users. You should only
show it when you know that a build will take some time and not annoy users of
the builds that take only few seconds.
-Every feature like this should go though a intensive review and testing to
-collect as much a feedback as possible and to fine tune every little detail. If
+Every feature like this should go through an intensive review and testing to
+collect as much feedback as possible and to fine tune every little detail. If
done right this can be an awesome features beginners and advance users will
love, but if not done perfectly it will annoy users and leave bad impression.
# Input
-Input to a command is provided via `ARGUMENTS` and `OPTIONS`.
+Input to a command is provided via `ARGUMENTS` and `OPTIONS`.
`ARGUMENTS` represent a required input for a function. When choosing to use
-`ARGUMENT` over function please be aware of the downsides that come with it:
+`ARGUMENTS` over `OPTIONS` please be aware of the downsides that come with it:
- User will need to remember the order of `ARGUMENTS`. This is not a problem if
there is only one `ARGUMENT`.
@@ -253,7 +253,7 @@ developer consider the downsides and choose wisely.
## Naming the `OPTIONS`
-Then only naming convention - apart from the ones mentioned in Naming the
+The only naming convention - apart from the ones mentioned in Naming the
`COMMANDS` section is how flags are named.
Flags are a type of `OPTION` that represent an option that can be turned ON of
@@ -271,12 +271,12 @@ to improve the discoverability of possible input. A new user will most likely
not know which `ARGUMENTS` and `OPTIONS` are required or which values are
possible for those options.
-In cases, the user might not provide the input or they provide wrong input,
-rather then show the error, prompt a user with an option to find and select
+In case the user does not provide the input or they provide wrong input,
+rather than show the error, prompt a user with an option to find and select
correct input (see examples).
Prompting is of course not required when TTY is not attached to STDIN. This
-would mean that scripts wont need to handle prompt, but rather handle errors.
+would mean that scripts won't need to handle prompt, but rather handle errors.
A place to use prompt and provide user with interactive select
@@ -300,9 +300,9 @@ going to happen.
```shell
$ nix build --option substitutors https://cache.example.org
------------------------------------------------------------------------
- Warning! A security related question need to be answered.
+ Warning! A security related question needs to be answered.
------------------------------------------------------------------------
- The following substitutors will be used to in `my-project`:
+ The following substitutors will be used in `my-project`:
- https://cache.example.org
Do you allow `my-project` to use above mentioned substitutors?
@@ -311,14 +311,14 @@ $ nix build --option substitutors https://cache.example.org
# Output
-Terminal output can be quite limiting in many ways. Which should forces us to
+Terminal output can be quite limiting in many ways. Which should force us to
think about the experience even more. As with every design the output is a
compromise between being terse and being verbose, between showing help to
beginners and annoying advance users. For this it is important that we know
what are the priorities.
Nix command line should be first and foremost written with beginners in mind.
-But users wont stay beginners for long and what was once useful might quickly
+But users won't stay beginners for long and what was once useful might quickly
become annoying. There is no golden rule that we can give in this guideline
that would make it easier how to draw a line and find best compromise.
@@ -342,7 +342,7 @@ also allowing them to redirect content to a file. For example:
```shell
$ nix build > build.txt
------------------------------------------------------------------------
- Error! Atrribute `bin` missing at (1:94) from string.
+ Error! Attribute `bin` missing at (1:94) from string.
------------------------------------------------------------------------
1| with import <nixpkgs> { }; (pkgs.runCommandCC or pkgs.runCommand) "shell" { buildInputs = [ (surge.bin) ]; } ""
@@ -408,7 +408,7 @@ Above command clearly states that command successfully completed. And in case
of `nix build`, which is a command that might take some time to complete, it is
equally important to also show that a command started.
-## Text alignment
+## Text alignment
Text alignment is the number one design element that will present all of the
Nix commands as a family and not as separate tools glued together.
@@ -419,7 +419,7 @@ The format we should follow is:
$ nix COMMAND
VERB_1 NOUN and other words
VERB__1 NOUN and other words
- |> Some details
+ |> Some details
```
Few rules that we can extract from above example:
@@ -444,13 +444,13 @@ is not even notable, therefore relying on it wouldn’t make much sense.
**The bright text is much better supported** across terminals and color
schemes. Most of the time the difference is perceived as if the bright text
-would be bold.
+would be bold.
## Colors
Humans are already conditioned by society to attach certain meaning to certain
colors. While the meaning is not universal, a simple collection of colors is
-used to represent basic emotions.
+used to represent basic emotions.
Colors that can be used in output
@@ -508,7 +508,7 @@ can, with a few key strokes, be changed into and advance introspection tool.
### Progress
-For longer running commands we should provide and overview of the progress.
+For longer running commands we should provide an overview of the progress.
This is shown best in `nix build` example:
```shell
@@ -553,9 +553,9 @@ going to happen.
```shell
$ nix build --option substitutors https://cache.example.org
------------------------------------------------------------------------
- Warning! A security related question need to be answered.
+ Warning! A security related question needs to be answered.
------------------------------------------------------------------------
- The following substitutors will be used to in `my-project`:
+ The following substitutors will be used in `my-project`:
- https://cache.example.org
Do you allow `my-project` to use above mentioned substitutors?
@@ -566,7 +566,7 @@ $ nix build --option substitutors https://cache.example.org
There are many ways that you can control verbosity.
-Verbosity levels are:
+Verbosity levels are:
- `ERROR` (level 0)
- `WARN` (level 1)
@@ -586,4 +586,4 @@ There are also two shortcuts, `--debug` to run in `DEBUG` verbosity level and
# Appendix 1: Commands naming exceptions
-`nix init` and `nix repl` are well established
+`nix init` and `nix repl` are well established
diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md
index 2a1e55e5b..90a8f1f94 100644
--- a/doc/manual/src/contributing/hacking.md
+++ b/doc/manual/src/contributing/hacking.md
@@ -35,6 +35,25 @@ variables are set up so that those dependencies can be found:
$ nix-shell
```
+or if you have a flake-enabled nix:
+
+```console
+$ nix develop
+```
+
+To get a shell with a different compilation environment (e.g. stdenv,
+gccStdenv, clangStdenv, clang11Stdenv):
+
+```console
+$ nix-shell -A devShells.x86_64-linux.clang11StdenvPackages
+```
+
+or if you have a flake-enabled nix:
+
+```console
+$ nix develop .#clang11StdenvPackages
+```
+
To build Nix itself in this shell:
```console
diff --git a/doc/manual/src/expressions/advanced-attributes.md b/doc/manual/src/expressions/advanced-attributes.md
index 5b208df67..000595815 100644
--- a/doc/manual/src/expressions/advanced-attributes.md
+++ b/doc/manual/src/expressions/advanced-attributes.md
@@ -237,7 +237,7 @@ Derivations can declare some infrequently used optional attributes.
- `preferLocalBuild`\
If this attribute is set to `true` and [distributed building is
enabled](../advanced-topics/distributed-builds.md), then, if
- possible, the derivaton will be built locally instead of forwarded
+ possible, the derivation will be built locally instead of forwarded
to a remote machine. This is appropriate for trivial builders
where the cost of doing a download or remote build would exceed
the cost of building locally.
diff --git a/doc/manual/src/expressions/builtins-prefix.md b/doc/manual/src/expressions/builtins-prefix.md
index 87127de2a..c631a8453 100644
--- a/doc/manual/src/expressions/builtins-prefix.md
+++ b/doc/manual/src/expressions/builtins-prefix.md
@@ -12,5 +12,5 @@ For instance, `derivation` is also available as `builtins.derivation`.
derivation attrs;
builtins.derivation attrs
- derivation in described in
+ derivation is described in
its own section.
diff --git a/doc/manual/src/expressions/expression-syntax.md b/doc/manual/src/expressions/expression-syntax.md
index 2a1306e32..6b93e692c 100644
--- a/doc/manual/src/expressions/expression-syntax.md
+++ b/doc/manual/src/expressions/expression-syntax.md
@@ -26,7 +26,7 @@ elements (referenced from the figure by number):
called with three arguments: `stdenv`, `fetchurl`, and `perl`. They
are needed to build Hello, but we don't know how to build them here;
that's why they are function arguments. `stdenv` is a package that
- is used by almost all Nix Packages packages; it provides a
+ is used by almost all Nix Packages; it provides a
“standard” environment consisting of the things you would expect
in a basic Unix environment: a C/C++ compiler (GCC, to be precise),
the Bash shell, fundamental Unix tools such as `cp`, `grep`, `tar`,
diff --git a/doc/manual/src/expressions/language-constructs.md b/doc/manual/src/expressions/language-constructs.md
index cb0169239..1c01f2cc7 100644
--- a/doc/manual/src/expressions/language-constructs.md
+++ b/doc/manual/src/expressions/language-constructs.md
@@ -284,6 +284,10 @@ The points of interest are:
function is called with the `localServer` argument set to `true` but
the `db4` argument set to `null`, then the evaluation fails.
+ Note that `->` is the [logical
+ implication](https://en.wikipedia.org/wiki/Truth_table#Logical_implication)
+ Boolean operation.
+
2. This is a more subtle condition: if Subversion is built with Apache
(`httpServer`) support, then the Expat library (an XML library) used
by Subversion should be same as the one used by Apache. This is
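A small sketch of that equivalence (not part of the manual text), reusing the `localServer`/`db4` names from this section:

```nix
# `->` is ordinary Boolean implication: `a -> b` is the same as `!a || b`.
# With these values the implication is false, so the assert described in
# point 1 would fail; the whole expression below evaluates to `true`,
# because both sides of the `==` are `false`.
let
  localServer = true;
  db4 = null;
  implies = a: b: !a || b;   # implication spelled out for comparison
in
  (localServer -> db4 != null) == implies localServer (db4 != null)
```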
diff --git a/doc/manual/src/expressions/language-operators.md b/doc/manual/src/expressions/language-operators.md
index b7fd6f4c6..268b44f4c 100644
--- a/doc/manual/src/expressions/language-operators.md
+++ b/doc/manual/src/expressions/language-operators.md
@@ -17,12 +17,12 @@ order of precedence (from strongest to weakest binding).
| String Concatenation | *string1* `+` *string2* | left | String concatenation. | 7 |
| Not | `!` *e* | none | Boolean negation. | 8 |
| Update | *e1* `//` *e2* | right | Return a set consisting of the attributes in *e1* and *e2* (with the latter taking precedence over the former in case of equally named attributes). | 9 |
-| Less Than | *e1* `<` *e2*, | none | Arithmetic comparison. | 10 |
-| Less Than or Equal To | *e1* `<=` *e2* | none | Arithmetic comparison. | 10 |
-| Greater Than | *e1* `>` *e2* | none | Arithmetic comparison. | 10 |
-| Greater Than or Equal To | *e1* `>=` *e2* | none | Arithmetic comparison. | 10 |
+| Less Than | *e1* `<` *e2*, | none | Arithmetic/lexicographic comparison. | 10 |
+| Less Than or Equal To | *e1* `<=` *e2* | none | Arithmetic/lexicographic comparison. | 10 |
+| Greater Than | *e1* `>` *e2* | none | Arithmetic/lexicographic comparison. | 10 |
+| Greater Than or Equal To | *e1* `>=` *e2* | none | Arithmetic/lexicographic comparison. | 10 |
| Equality | *e1* `==` *e2* | none | Equality. | 11 |
| Inequality | *e1* `!=` *e2* | none | Inequality. | 11 |
| Logical AND | *e1* `&&` *e2* | left | Logical AND. | 12 |
-| Logical OR | *e1* `\|\|` *e2* | left | Logical OR. | 13 |
-| Logical Implication | *e1* `->` *e2* | none | Logical implication (equivalent to `!e1 \|\| e2`). | 14 |
+| Logical OR | *e1* <code>&#124;&#124;</code> *e2* | left | Logical OR. | 13 |
+| Logical Implication | *e1* `->` *e2* | none | Logical implication (equivalent to <code>!e1 &#124;&#124; e2</code>). | 14 |
diff --git a/doc/manual/src/expressions/language-values.md b/doc/manual/src/expressions/language-values.md
index 28fa23b58..75ae9f2eb 100644
--- a/doc/manual/src/expressions/language-values.md
+++ b/doc/manual/src/expressions/language-values.md
@@ -64,7 +64,7 @@ Nix has the following basic data types:
the start of each line. To be precise, it strips from each line a
number of spaces equal to the minimal indentation of the string as a
whole (disregarding the indentation of empty lines). For instance,
- the first and second line are indented two space, while the third
+ the first and second line are indented two spaces, while the third
line is indented four spaces. Thus, two spaces are stripped from
each line, so the resulting string is
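For reference, the behaviour described in these lines, written out as a sketch:

```nix
# Indented ('' ... '') string: the two spaces common to every line are
# stripped, the extra two spaces on the third line are kept, and the trailing
# newline remains.
''
  This is the first line.
  This is the second line.
    This is the third line.
''
# => "This is the first line.\nThis is the second line.\n  This is the third line.\n"
```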
diff --git a/doc/manual/src/expressions/simple-building-testing.md b/doc/manual/src/expressions/simple-building-testing.md
index 6f730a936..7f0d8f841 100644
--- a/doc/manual/src/expressions/simple-building-testing.md
+++ b/doc/manual/src/expressions/simple-building-testing.md
@@ -1,6 +1,6 @@
# Building and Testing
-You can now try to build Hello. Of course, you could do `nix-env -i
+You can now try to build Hello. Of course, you could do `nix-env -f . -iA
hello`, but you may not want to install a possibly broken package just
yet. The best way to test the package is by using the command
`nix-build`, which builds a Nix expression and creates a symlink named
diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index bb350d9de..71ff13275 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -47,7 +47,7 @@
the store object at `P` contains the path `Q` somewhere. The
*references* of a store path are the set of store paths to which it
has a reference.
-
+
A derivation can reference other derivations and sources (but not
output paths), whereas an output path only references other output
paths.
@@ -66,7 +66,7 @@
is necessary to deploy whole closures, since otherwise at runtime
files could be missing. The command `nix-store -qR` prints out
closures of store paths.
-
+
As an example, if the store object at path `P` contains a reference
to path `Q`, then `Q` is in the closure of `P`. Further, if `Q`
references `R` then `R` is also in the closure of `P`.
@@ -98,3 +98,7 @@
store. It can contain regular files, directories and symbolic
links. NARs are generated and unpacked using `nix-store --dump`
and `nix-store --restore`.
+ - `∅` \
+ The empty set symbol. In the context of profile history, this denotes a package is not present in a particular version of the profile.
+ - `ε` \
+ The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute.
diff --git a/doc/manual/src/installation/building-source.md b/doc/manual/src/installation/building-source.md
index d21a51a82..ed1efffd8 100644
--- a/doc/manual/src/installation/building-source.md
+++ b/doc/manual/src/installation/building-source.md
@@ -1,9 +1,9 @@
# Building Nix from Source
-After unpacking or checking out the Nix sources, issue the following
-commands:
+After cloning Nix's Git repository, issue the following commands:
```console
+$ ./bootstrap.sh
$ ./configure options...
$ make
$ make install
@@ -11,13 +11,6 @@ $ make install
Nix requires GNU Make so you may need to invoke `gmake` instead.
-When building from the Git repository, these should be preceded by the
-command:
-
-```console
-$ ./bootstrap.sh
-```
-
The installation path can be specified by passing the `--prefix=prefix`
to `configure`. The default installation directory is `/usr/local`. You
can change this to any location you like. You must have write permission
diff --git a/doc/manual/src/installation/env-variables.md b/doc/manual/src/installation/env-variables.md
index 4a49897e4..bb35c0e9f 100644
--- a/doc/manual/src/installation/env-variables.md
+++ b/doc/manual/src/installation/env-variables.md
@@ -40,7 +40,7 @@ export NIX_SSL_CERT_FILE=/etc/ssl/my-certificate-bundle.crt
> **Note**
>
> You must not add the export and then do the install, as the Nix
-> installer will detect the presense of Nix configuration, and abort.
+> installer will detect the presence of Nix configuration, and abort.
## `NIX_SSL_CERT_FILE` with macOS and the Nix daemon
diff --git a/doc/manual/src/installation/installing-binary.md b/doc/manual/src/installation/installing-binary.md
index 96fa34635..4367654a2 100644
--- a/doc/manual/src/installation/installing-binary.md
+++ b/doc/manual/src/installation/installing-binary.md
@@ -119,6 +119,30 @@ this to run the installer, but it may help if you run into trouble:
- update `/etc/synthetic.conf` to direct macOS to create a "synthetic"
empty root directory to mount your volume
- specify mount options for the volume in `/etc/fstab`
+ - `rw`: read-write
+ - `noauto`: prevent the system from auto-mounting the volume (so the
+ LaunchDaemon mentioned below can control mounting it, and to avoid
+ masking problems with that mounting service).
+ - `nobrowse`: prevent the Nix Store volume from showing up on your
+ desktop; also keeps Spotlight from spending resources to index
+ this volume
+
- if you have FileVault enabled
- generate an encryption password
- put it in your system Keychain
diff --git a/doc/manual/src/installation/installing-docker.md b/doc/manual/src/installation/installing-docker.md
new file mode 100644
index 000000000..9d6d8f2d9
--- /dev/null
+++ b/doc/manual/src/installation/installing-docker.md
@@ -0,0 +1,59 @@
+# Using Nix within Docker
+
+To run the latest stable release of Nix with Docker run the following command:
+
+```console
+$ docker run -ti nixos/nix
+Unable to find image 'nixos/nix:latest' locally
+latest: Pulling from nixos/nix
+5843afab3874: Pull complete
+b52bf13f109c: Pull complete
+1e2415612aa3: Pull complete
+Digest: sha256:27f6e7f60227e959ee7ece361f75d4844a40e1cc6878b6868fe30140420031ff
+Status: Downloaded newer image for nixos/nix:latest
+35ca4ada6e96:/# nix --version
+nix (Nix) 2.3.12
+35ca4ada6e96:/# exit
+```
+
+# What is included in Nix's Docker image?
+
+The official Docker image is created using `pkgs.dockerTools.buildLayeredImage`
+(and not with a `Dockerfile`, as is usual with Docker images). You can still
+base your custom Docker image on it as you would do with any other Docker
+image.
+
+The Docker image is also not based on any other image and includes a minimal
+set of runtime dependencies that are required to use Nix:
+
+ - pkgs.nix
+ - pkgs.bashInteractive
+ - pkgs.coreutils-full
+ - pkgs.gnutar
+ - pkgs.gzip
+ - pkgs.gnugrep
+ - pkgs.which
+ - pkgs.curl
+ - pkgs.less
+ - pkgs.wget
+ - pkgs.man
+ - pkgs.cacert.out
+ - pkgs.findutils
+
+# Docker image with the latest development version of Nix
+
+To get the latest image that was built by [Hydra](https://hydra.nixos.org) run
+the following command:
+
+```console
+$ curl -L https://hydra.nixos.org/job/nix/master/dockerImage.x86_64-linux/latest/download/1 | docker load
+$ docker run -ti nix:2.5pre20211105
+```
+
+You can also build a Docker image from source yourself:
+
+```console
+$ nix build ./\#hydraJobs.dockerImage.x86_64-linux
+$ docker load -i ./result/image.tar.gz
+$ docker run -ti nix:2.5pre20211105
+```
diff --git a/doc/manual/src/installation/installing-source.md b/doc/manual/src/installation/installing-source.md
index e52d38a03..09b4e4887 100644
--- a/doc/manual/src/installation/installing-source.md
+++ b/doc/manual/src/installation/installing-source.md
@@ -1,4 +1,4 @@
# Installing Nix from Source
-If no binary package is available, you can download and compile a source
-distribution.
+If no binary package is available or if you want to hack on Nix, you
+can build Nix from its Git repository.
diff --git a/doc/manual/src/installation/obtaining-source.md b/doc/manual/src/installation/obtaining-source.md
index 0a906e390..da05d243d 100644
--- a/doc/manual/src/installation/obtaining-source.md
+++ b/doc/manual/src/installation/obtaining-source.md
@@ -1,14 +1,9 @@
-# Obtaining a Source Distribution
+# Obtaining the Source
-The source tarball of the most recent stable release can be downloaded
-from the [Nix homepage](http://nixos.org/nix/download.html). You can
-also grab the [most recent development
-release](http://hydra.nixos.org/job/nix/master/release/latest-finished#tabs-constituents).
-
-Alternatively, the most recent sources of Nix can be obtained from its
-[Git repository](https://github.com/NixOS/nix). For example, the
-following command will check out the latest revision into a directory
-called `nix`:
+The most recent sources of Nix can be obtained from its [Git
+repository](https://github.com/NixOS/nix). For example, the following
+command will check out the latest revision into a directory called
+`nix`:
```console
$ git clone https://github.com/NixOS/nix
diff --git a/doc/manual/src/installation/prerequisites-source.md b/doc/manual/src/installation/prerequisites-source.md
index 40cb79627..6f4eb3008 100644
--- a/doc/manual/src/installation/prerequisites-source.md
+++ b/doc/manual/src/installation/prerequisites-source.md
@@ -2,9 +2,8 @@
- GNU Autoconf () and the
autoconf-archive macro collection
- (). These are only
- needed to run the bootstrap script, and are not necessary if your
- source distribution came with a pre-built `./configure` script.
+ (). These are
+ needed to run the bootstrap script.
- GNU Make.
@@ -45,6 +44,11 @@
obtained from the its repository
.
+ - The `libsodium` library for verifying cryptographic signatures
+ of contents fetched from binary caches.
+ It can be obtained from the official web site
+ .
+
- Recent versions of Bison and Flex to build the parser. (This is
because Nix needs GLR support in Bison and reentrancy support in
Flex.) For Bison, you need version 2.6, which can be obtained from
@@ -52,11 +56,18 @@
you need version 2.5.35, which is available on
[SourceForge](http://lex.sourceforge.net/). Slightly older versions
may also work, but ancient versions like the ubiquitous 2.5.4a
- won't. Note that these are only required if you modify the parser or
- when you are building from the Git repository.
+ won't.
- The `libseccomp` is used to provide syscall filtering on Linux. This
is an optional dependency and can be disabled passing a
`--disable-seccomp-sandboxing` option to the `configure` script (Not
recommended unless your system doesn't support `libseccomp`). To get
the library, visit .
+
+ - On 64-bit x86 machines only, the `libcpuid` library
+ is used to determine which microarchitecture levels are supported
+ (e.g., whether to have `x86_64-v2-linux` among additional system types).
+ The library is available from its homepage
+ .
+ This is an optional dependency and can be disabled
+ by passing `--disable-cpuid` to the `configure` script.
diff --git a/doc/manual/src/installation/supported-platforms.md b/doc/manual/src/installation/supported-platforms.md
index 8ef1f0e78..8ca3ce8d4 100644
--- a/doc/manual/src/installation/supported-platforms.md
+++ b/doc/manual/src/installation/supported-platforms.md
@@ -4,4 +4,4 @@ Nix is currently supported on the following platforms:
- Linux (i686, x86\_64, aarch64).
- - macOS (x86\_64).
+ - macOS (x86\_64, aarch64).
diff --git a/doc/manual/src/introduction.md b/doc/manual/src/introduction.md
index d68445c95..d87487a07 100644
--- a/doc/manual/src/introduction.md
+++ b/doc/manual/src/introduction.md
@@ -76,7 +76,7 @@ there after an upgrade. This means that you can _roll back_ to the
old version:
```console
-$ nix-env --upgrade some-packages
+$ nix-env --upgrade -A nixpkgs.some-package
$ nix-env --rollback
```
@@ -122,12 +122,12 @@ Nix expressions generally describe how to build a package from
source, so an installation action like
```console
-$ nix-env --install firefox
+$ nix-env --install -A nixpkgs.firefox
```
_could_ cause quite a bit of build activity, as not only Firefox but
also all its dependencies (all the way up to the C library and the
-compiler) would have to built, at least if they are not already in the
+compiler) would have to be built, at least if they are not already in the
Nix store. This is a _source deployment model_. For most users,
building from source is not very pleasant as it takes far too long.
However, Nix can automatically skip building from source and instead
diff --git a/doc/manual/src/package-management/basic-package-mgmt.md b/doc/manual/src/package-management/basic-package-mgmt.md
index 9702a29eb..5f1d7a89c 100644
--- a/doc/manual/src/package-management/basic-package-mgmt.md
+++ b/doc/manual/src/package-management/basic-package-mgmt.md
@@ -24,7 +24,7 @@ collection; you could write your own Nix expressions based on Nixpkgs,
or completely new ones.)
You can manually download the latest version of Nixpkgs from
-. However, it’s much more
+. However, it’s much more
convenient to use the Nixpkgs [*channel*](channels.md), since it makes
it easy to stay up to date with new versions of Nixpkgs. Nixpkgs is
automatically added to your list of “subscribed” channels when you
@@ -40,48 +40,52 @@ $ nix-channel --update
>
> On NixOS, you’re automatically subscribed to a NixOS channel
> corresponding to your NixOS major release (e.g.
-> ). A NixOS channel is identical
+> ). A NixOS channel is identical
> to the Nixpkgs channel, except that it contains only Linux binaries
> and is updated only if a set of regression tests succeed.
You can view the set of available packages in Nixpkgs:
```console
-$ nix-env -qa
-aterm-2.2
-bash-3.0
-binutils-2.15
-bison-1.875d
-blackdown-1.4.2
-bzip2-1.0.2
+$ nix-env -qaP
+nixpkgs.aterm aterm-2.2
+nixpkgs.bash bash-3.0
+nixpkgs.binutils binutils-2.15
+nixpkgs.bison bison-1.875d
+nixpkgs.blackdown blackdown-1.4.2
+nixpkgs.bzip2 bzip2-1.0.2
…
```
-The flag `-q` specifies a query operation, and `-a` means that you want
+The flag `-q` specifies a query operation, `-a` means that you want
to show the “available” (i.e., installable) packages, as opposed to the
-installed packages. If you downloaded Nixpkgs yourself, or if you
-checked it out from GitHub, then you need to pass the path to your
-Nixpkgs tree using the `-f` flag:
+installed packages, and `-P` prints the attribute paths that can be used
+to unambiguously select a package for installation (listed in the first column).
+If you downloaded Nixpkgs yourself, or if you checked it out from GitHub,
+then you need to pass the path to your Nixpkgs tree using the `-f` flag:
```console
-$ nix-env -qaf /path/to/nixpkgs
+$ nix-env -qaPf /path/to/nixpkgs
+aterm aterm-2.2
+bash bash-3.0
+…
```
where */path/to/nixpkgs* is where you’ve unpacked or checked out
Nixpkgs.
-You can select specific packages by name:
+You can filter the packages by name:
```console
-$ nix-env -qa firefox
-firefox-34.0.5
-firefox-with-plugins-34.0.5
+$ nix-env -qaP firefox
+nixpkgs.firefox-esr firefox-91.3.0esr
+nixpkgs.firefox firefox-94.0.1
```
and using regular expressions:
```console
-$ nix-env -qa 'firefox.*'
+$ nix-env -qaP 'firefox.*'
```
It is also possible to see the *status* of available packages, i.e.,
@@ -89,11 +93,11 @@ whether they are installed into the user environment and/or present in
the system:
```console
-$ nix-env -qas
+$ nix-env -qaPs
…
--PS bash-3.0
---S binutils-2.15
-IPS bison-1.875d
+-PS nixpkgs.bash bash-3.0
+--S nixpkgs.binutils binutils-2.15
+IPS nixpkgs.bison bison-1.875d
…
```
@@ -106,13 +110,13 @@ which is Nix’s mechanism for doing binary deployment. It just means that
Nix knows that it can fetch a pre-built package from somewhere
(typically a network server) instead of building it locally.
-You can install a package using `nix-env -i`. For instance,
+You can install a package using `nix-env -iA`. For instance,
```console
-$ nix-env -i subversion
+$ nix-env -iA nixpkgs.subversion
```
-will install the package called `subversion` (which is, of course, the
+will install the package called `subversion` from the `nixpkgs` channel (which is, of course, the
[Subversion version management system](http://subversion.tigris.org/)).
> **Note**
@@ -122,7 +126,7 @@ will install the package called `subversion` (which is, of course, the
> binary cache ; it contains binaries for most
> packages in Nixpkgs. Only if no binary is available in the binary
> cache, Nix will build the package from source. So if `nix-env
-> -i subversion` results in Nix building stuff from source, then either
+> -iA nixpkgs.subversion` results in Nix building stuff from source, then either
> the package is not built for your platform by the Nixpkgs build
> servers, or your version of Nixpkgs is too old or too new. For
> instance, if you have a very recent checkout of Nixpkgs, then the
@@ -133,7 +137,10 @@ will install the package called `subversion` (which is, of course, the
> using a Git checkout of the Nixpkgs tree), you will get binaries for
> most packages.
-Naturally, packages can also be uninstalled:
+Naturally, packages can also be uninstalled. Unlike when installing, you will
+need to use the derivation name (though the version part can be omitted),
+instead of the attribute path, as `nix-env` does not record which attribute
+was used for installing:
```console
$ nix-env -e subversion
@@ -143,7 +150,7 @@ Upgrading to a new version is just as easy. If you have a new release of
Nix Packages, you can do:
```console
-$ nix-env -u subversion
+$ nix-env -uA nixpkgs.subversion
```
This will *only* upgrade Subversion if there is a “newer” version in the
diff --git a/doc/manual/src/package-management/binary-cache-substituter.md b/doc/manual/src/package-management/binary-cache-substituter.md
index bdc5038fc..ef738794b 100644
--- a/doc/manual/src/package-management/binary-cache-substituter.md
+++ b/doc/manual/src/package-management/binary-cache-substituter.md
@@ -9,7 +9,7 @@ The daemon that handles binary cache requests via HTTP, `nix-serve`, is
not part of the Nix distribution, but you can install it from Nixpkgs:
```console
-$ nix-env -i nix-serve
+$ nix-env -iA nixpkgs.nix-serve
```
You can then start the server, listening for HTTP connections on
@@ -35,7 +35,7 @@ On the client side, you can tell Nix to use your binary cache using
`--option extra-binary-caches`, e.g.:
```console
-$ nix-env -i firefox --option extra-binary-caches http://avalon:8080/
+$ nix-env -iA nixpkgs.firefox --option extra-binary-caches http://avalon:8080/
```
The option `extra-binary-caches` tells Nix to use this binary cache in
diff --git a/doc/manual/src/package-management/garbage-collection.md b/doc/manual/src/package-management/garbage-collection.md
index fecb30fd6..29a3b3101 100644
--- a/doc/manual/src/package-management/garbage-collection.md
+++ b/doc/manual/src/package-management/garbage-collection.md
@@ -44,7 +44,7 @@ collector as follows:
$ nix-store --gc
```
-The behaviour of the gargage collector is affected by the
+The behaviour of the garbage collector is affected by the
`keep-derivations` (default: true) and `keep-outputs` (default: false)
options in the Nix configuration file. The defaults will ensure that all
derivations that are build-time dependencies of garbage collector roots
diff --git a/doc/manual/src/package-management/profiles.md b/doc/manual/src/package-management/profiles.md
index fbbfb7320..d1a2580d4 100644
--- a/doc/manual/src/package-management/profiles.md
+++ b/doc/manual/src/package-management/profiles.md
@@ -39,7 +39,7 @@ just Subversion 1.1.2 (arrows in the figure indicate symlinks). This
would be what we would obtain if we had done
```console
-$ nix-env -i subversion
+$ nix-env -iA nixpkgs.subversion
```
on a set of Nix expressions that contained Subversion 1.1.2.
@@ -54,7 +54,7 @@ environment is generated based on the current one. For instance,
generation 43 was created from generation 42 when we did
```console
-$ nix-env -i subversion firefox
+$ nix-env -iA nixpkgs.subversion nixpkgs.firefox
```
on a set of Nix expressions that contained Firefox and a new version of
@@ -127,7 +127,7 @@ All `nix-env` operations work on the profile pointed to by
(abbreviation `-p`):
```console
-$ nix-env -p /nix/var/nix/profiles/other-profile -i subversion
+$ nix-env -p /nix/var/nix/profiles/other-profile -iA nixpkgs.subversion
```
This will *not* change the `~/.nix-profile` symlink.
diff --git a/doc/manual/src/package-management/ssh-substituter.md b/doc/manual/src/package-management/ssh-substituter.md
index 6e5e258bc..c59933f61 100644
--- a/doc/manual/src/package-management/ssh-substituter.md
+++ b/doc/manual/src/package-management/ssh-substituter.md
@@ -6,7 +6,7 @@ automatically fetching any store paths in Firefox’s closure if they are
available on the server `avalon`:
```console
-$ nix-env -i firefox --substituters ssh://alice@avalon
+$ nix-env -iA nixpkgs.firefox --substituters ssh://alice@avalon
```
This works similar to the binary cache substituter that Nix usually
diff --git a/doc/manual/src/quick-start.md b/doc/manual/src/quick-start.md
index 71205923b..b54e73500 100644
--- a/doc/manual/src/quick-start.md
+++ b/doc/manual/src/quick-start.md
@@ -19,19 +19,19 @@ to subsequent chapters.
channel:
```console
- $ nix-env -qa
- docbook-xml-4.3
- docbook-xml-4.5
- firefox-33.0.2
- hello-2.9
- libxslt-1.1.28
+ $ nix-env -qaP
+ nixpkgs.docbook_xml_dtd_43 docbook-xml-4.3
+ nixpkgs.docbook_xml_dtd_45 docbook-xml-4.5
+ nixpkgs.firefox firefox-33.0.2
+ nixpkgs.hello hello-2.9
+ nixpkgs.libxslt libxslt-1.1.28
…
```
1. Install some packages from the channel:
```console
- $ nix-env -i hello
+ $ nix-env -iA nixpkgs.hello
```
This should download pre-built packages; it should not build them
diff --git a/doc/manual/src/release-notes/rl-2.4.md b/doc/manual/src/release-notes/rl-2.4.md
index f7ab9f6ad..8b566fc7b 100644
--- a/doc/manual/src/release-notes/rl-2.4.md
+++ b/doc/manual/src/release-notes/rl-2.4.md
@@ -1,8 +1,542 @@
-# Release 2.4 (202X-XX-XX)
+# Release 2.4 (2021-11-01)
- - It is now an error to modify the `plugin-files` setting via a
- command-line flag that appears after the first non-flag argument
- to any command, including a subcommand to `nix`. For example,
- `nix-instantiate default.nix --plugin-files ""` must now become
- `nix-instantiate --plugin-files "" default.nix`.
- - Plugins that add new `nix` subcommands are now actually respected.
+This is the first release in more than two years and is the result of
+more than 2800 commits from 195 contributors since release 2.3.
+
+## Highlights
+
+* Nix's **error messages** have been improved a lot. For instance,
+ evaluation errors now point out the location of the error:
+
+ ```
+ $ nix build
+ error: undefined variable 'bzip3'
+
+ at /nix/store/449lv242z0zsgwv95a8124xi11sp419f-source/flake.nix:88:13:
+
+ 87| [ curl
+ 88| bzip3 xz brotli editline
+ | ^
+ 89| openssl sqlite
+ ```
+
+* The **`nix` command** has seen a lot of work and is now almost at
+ feature parity with the old command-line interface (the `nix-*`
+ commands). It aims to be [more modern, consistent and pleasant to
+ use](../contributing/cli-guideline.md) than the old CLI. It is still
+ marked as experimental but its interface should not change much
+ anymore in future releases.
+
+* **Flakes** are a new format to package Nix-based projects in a more
+ discoverable, composable, consistent and reproducible way. A flake
+ is just a repository or tarball containing a file named `flake.nix`
+ that specifies dependencies on other flakes and returns any Nix
+ assets such as packages, Nixpkgs overlays, NixOS modules or CI
+ tests. The new `nix` CLI is primarily based around flakes; for
+ example, a command like `nix run nixpkgs#hello` runs the `hello`
+ application from the `nixpkgs` flake.
+
+ Flakes are currently marked as experimental. For an introduction,
+ see [this blog
+ post](https://www.tweag.io/blog/2020-05-25-flakes/). For detailed
+ information about flake syntax and semantics, see the [`nix flake`
+ manual page](../command-ref/new-cli/nix3-flake.md).
+
+* Nix's store can now be **content-addressed**, meaning that the hash
+ component of a store path is the hash of the path's
+ contents. Previously Nix could only build **input-addressed** store
+ paths, where the hash is computed from the derivation dependency
+ graph. Content-addressing allows deduplication, early cutoff in
+ build systems, and unprivileged closure copying. This is still [an
+ experimental
+ feature](https://discourse.nixos.org/t/content-addressed-nix-call-for-testers/12881).
+
+* The Nix manual has been converted into Markdown, making it easier to
+ contribute. In addition, every `nix` subcommand now has a manual
+ page, documenting every option.
+
+* A new setting allows **experimental features** to be enabled
+  selectively. This allows us to merge unstable features into Nix more
+  quickly and do more frequent releases.
+
+## Other features
+
+* There are many new `nix` subcommands:
+
+ - `nix develop` is intended to replace `nix-shell`. It has a number
+ of new features:
+
+ * It automatically sets the output environment variables (such as
+ `$out`) to writable locations (such as `./outputs/out`).
+
+ * It can store the environment in a profile. This is useful for
+ offline work.
+
+ * It can run specific phases directly. For instance, `nix develop
+ --build` runs `buildPhase`.
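+
+      A sketch of this usage (`nixpkgs#hello` is just an illustrative
+      flake reference):
+
+      ```console
+      $ nix develop nixpkgs#hello --build
+      ```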
+
+ - It allows dependencies in the Nix store to be "redirected" to
+ arbitrary directories using the `--redirect` flag. This is
+ useful if you want to hack on a package *and* some of its
+ dependencies at the same time.
+
+  - `nix print-dev-env` prints the environment variables and bash
+    functions defined by a derivation. This is useful for users of
+    shells other than bash (especially with `--json`).
+
+ - `nix shell` was previously named `nix run` and is intended to
+ replace `nix-shell -p`, but without the `stdenv` overhead. It
+ simply starts a shell where some packages have been added to
+ `$PATH`.
+
+ - `nix run` (not to be confused with the old subcommand that has
+ been renamed to `nix shell`) runs an "app", a flake output that
+ specifies a command to run, or an eponymous program from a
+ package. For example, `nix run nixpkgs#hello` runs the `hello`
+ program from the `hello` package in `nixpkgs`.
+
+ - `nix flake` is the container for flake-related operations, such as
+ creating a new flake, querying the contents of a flake or updating
+ flake lock files.
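+
+    For example (a sketch of common invocations):
+
+    ```console
+    $ nix flake init      # create a flake.nix from the default template
+    $ nix flake show      # list the outputs of the flake in the current directory
+    $ nix flake update    # regenerate flake.lock
+    ```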
+
+ - `nix registry` allows you to query and update the flake registry,
+ which maps identifiers such as `nixpkgs` to concrete flake URLs.
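+
+    For example (a sketch):
+
+    ```console
+    $ nix registry list
+    $ nix registry pin nixpkgs
+    ```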
+
+ - `nix profile` is intended to replace `nix-env`. Its main advantage
+ is that it keeps track of the provenance of installed packages
+ (e.g. exactly which flake version a package came from). It also
+ has some helpful subcommands:
+
+ * `nix profile history` shows what packages were added, upgraded
+ or removed between each version of a profile.
+
+ * `nix profile diff-closures` shows the changes between the
+ closures of each version of a profile. This allows you to
+ discover the addition or removal of dependencies or size
+ changes.
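+
+    A sketch of typical usage (output omitted):
+
+    ```console
+    $ nix profile install nixpkgs#hello
+    $ nix profile history
+    ```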
+
+ **Warning**: after a profile has been updated using `nix profile`,
+ it is no longer usable with `nix-env`.
+
+ - `nix store diff-closures` shows the differences between the
+ closures of two store paths in terms of the versions and sizes of
+ dependencies in the closures.
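+
+    For example (a sketch; the two generation links are hypothetical):
+
+    ```console
+    $ nix store diff-closures /nix/var/nix/profiles/system-655-link /nix/var/nix/profiles/system-656-link
+    ```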
+
+ - `nix store make-content-addressable` rewrites an arbitrary closure
+ to make it content-addressed. Such paths can be copied into other
+ stores without requiring signatures.
+
+ - `nix bundle` uses the [`nix-bundle`
+ program](https://github.com/matthewbauer/nix-bundle) to convert a
+ closure into a self-extracting executable.
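+
+    For example (a sketch):
+
+    ```console
+    $ nix bundle nixpkgs#hello
+    ```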
+
+ - Various other replacements for the old CLI, e.g. `nix store gc`,
+ `nix store delete`, `nix store repair`, `nix nar dump-path`, `nix
+ store prefetch-file`, `nix store prefetch-tarball`, `nix key` and
+ `nix daemon`.
+
+* Nix now has an **evaluation cache** for flake outputs. For example,
+ a second invocation of the command `nix run nixpkgs#firefox` will
+ not need to evaluate the `firefox` attribute because it's already in
+ the evaluation cache. This is made possible by the hermetic
+ evaluation model of flakes.
+
+* The new `--offline` flag disables substituters and causes all
+ locally cached tarballs and repositories to be considered
+ up-to-date.
+
+* The new `--refresh` flag causes all locally cached tarballs and
+ repositories to be considered out-of-date.
+
+* Many `nix` subcommands now have a `--json` option to produce
+ machine-readable output.
+
+* `nix repl` has a new `:doc` command to show documentation about
+ builtin functions (e.g. `:doc builtins.map`).
+
+* Binary cache stores now have an option `index-debug-info` to create
+ an index of DWARF debuginfo files for use by
+ [`dwarffs`](https://github.com/edolstra/dwarffs).
+
+* To support flakes, Nix now has an extensible mechanism for fetching
+ source trees. Currently it has the following backends:
+
+ * Git repositories
+
+ * Mercurial repositories
+
+ * GitHub and GitLab repositories (an optimisation for faster
+ fetching than Git)
+
+ * Tarballs
+
+ * Arbitrary directories
+
+ The fetcher infrastructure is exposed via flake input specifications
+ and via the `fetchTree` built-in.
+
+* **Language changes**: the only new language feature is that you can
+ now have antiquotations in paths, e.g. `./${foo}` instead of `./. +
+ foo`.
+
+* **New built-in functions**:
+
+ - `builtins.fetchTree` allows fetching a source tree using any
+ backends supported by the fetcher infrastructure. It subsumes the
+ functionality of existing built-ins like `fetchGit`,
+ `fetchMercurial` and `fetchTarball`.
+
+ - `builtins.getFlake` fetches a flake and returns its output
+ attributes. This function should not be used inside flakes! Use
+ flake inputs instead.
+
+ - `builtins.floor` and `builtins.ceil` round a floating-point number
+ down and up, respectively.
+
+* Experimental support for recursive Nix. This means that Nix
+ derivations can now call Nix to build other derivations. This is not
+ in a stable state yet and not well
+ [documented](https://github.com/NixOS/nix/commit/c4d7c76b641d82b2696fef73ce0ac160043c18da).
+
+* The new experimental feature `no-url-literals` disables URL
+ literals. This helps to implement [RFC
+ 45](https://github.com/NixOS/rfcs/pull/45).
+
+* Nix now uses `libarchive` to decompress and unpack tarballs and zip
+ files, so `tar` is no longer required.
+
+* The priority of substituters can now be overridden using the
+ `priority` substituter setting (e.g. `--substituters
+ 'http://cache.nixos.org?priority=100 daemon?priority=10'`).
+
+* `nix edit` now supports non-derivation attributes, e.g. `nix edit
+ .#nixosConfigurations.bla`.
+
+* The `nix` command now provides command line completion for `bash`,
+ `zsh` and `fish`. Since the support for getting completions is built
+ into `nix`, it's easy to add support for other shells.
+
+* The new `--log-format` flag selects what Nix's output looks like. It
+ defaults to a terse progress indicator. There is a new
+ `internal-json` output format for use by other programs.
+
+* `nix eval` has a new `--apply` flag that applies a function to the
+ evaluation result.
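+
+  For instance (a sketch; assumes the `nix-command` and `flakes` experimental
+  features are enabled and `nixpkgs` resolves via the flake registry):
+
+  ```console
+  $ nix eval nixpkgs#lib.version --apply builtins.stringLength
+  ```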
+
+* `nix eval` has a new `--write-to` flag that allows it to write a
+ nested attribute set of string leaves to a corresponding directory
+ tree.
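+
+  A minimal sketch (the target path must not exist yet):
+
+  ```console
+  $ nix eval --write-to ./out --expr '{ greeting = "hello"; nested.name = "world"; }'
+  $ cat ./out/nested/name
+  world
+  ```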
+
+* Memory improvements: many operations that add paths to the store or
+ copy paths between stores now run in constant memory.
+
+* Many `nix` commands now support the flag `--derivation` to operate
+ on a `.drv` file itself instead of its outputs.
+
+* There is a new store called `dummy://` that does not support
+ building or adding paths. This is useful if you want to use the Nix
+ evaluator but don't have a Nix store.
+
+* The `ssh-ng://` store now allows substituting paths on the remote,
+ as `ssh://` already did.
+
+* When auto-calling a function with an ellipsis, all arguments are now
+ passed.
+
+* New `nix-shell` features:
+
+ - It preserves the `PS1` environment variable if
+ `NIX_SHELL_PRESERVE_PROMPT` is set.
+
+ - With `-p`, it passes any `--arg`s as Nixpkgs arguments.
+
+ - Support for structured attributes.
+
+* `nix-prefetch-url` has a new `--executable` flag.
+
+* On `x86_64` systems, [`x86_64` microarchitecture
+ levels](https://lwn.net/Articles/844831/) are mapped to additional
+ system types (e.g. `x86_64-v1-linux`).
+
+* The new `--eval-store` flag allows you to use a different store for
+ evaluation than for building or storing the build result. This is
+ primarily useful when you want to query whether something exists in
+ a read-only store, such as a binary cache:
+
+ ```
+ # nix path-info --json --store https://cache.nixos.org \
+ --eval-store auto nixpkgs#hello
+ ```
+
+ (Here `auto` indicates the local store.)
+
+* The Nix daemon has a new low-latency mechanism for copying
+ closures. This is useful when building on remote stores such as
+ `ssh-ng://`.
+
+* Plugins can now register `nix` subcommands.
+
+* The `--indirect` flag to `nix-store --add-root` has become a no-op.
+ `--add-root` will always generate indirect GC roots from now on.
+
+## Incompatible changes
+
+* The `nix` command is now marked as an experimental feature. This
+ means that you need to add
+
+ ```
+ experimental-features = nix-command
+ ```
+
+ to your `nix.conf` if you want to use it, or pass
+ `--extra-experimental-features nix-command` on the command line.
+
+* The `nix` command no longer has a syntax for referring to packages
+ in a channel. This means that the following no longer works:
+
+ ```console
+ nix build nixpkgs.hello # Nix 2.3
+ ```
+
+ Instead, you can either use the `#` syntax to select a package from
+ a flake, e.g.
+
+ ```console
+ nix build nixpkgs#hello
+ ```
+
+ Or, if you want to use the `nixpkgs` channel in the `NIX_PATH`
+ environment variable:
+
+ ```console
+  nix build -f '<nixpkgs>' hello
+ ```
+
+* The old `nix run` has been renamed to `nix shell`, while there is a
+ new `nix run` that runs a default command. So instead of
+
+ ```console
+ nix run nixpkgs.hello -c hello # Nix 2.3
+ ```
+
+ you should use
+
+ ```console
+ nix shell nixpkgs#hello -c hello
+ ```
+
+ or just
+
+ ```console
+ nix run nixpkgs#hello
+ ```
+
+ if the command you want to run has the same name as the package.
+
+* It is now an error to modify the `plugin-files` setting via a
+ command-line flag that appears after the first non-flag argument to
+ any command, including a subcommand to `nix`. For example,
+ `nix-instantiate default.nix --plugin-files ""` must now become
+ `nix-instantiate --plugin-files "" default.nix`.
+
+* We no longer release source tarballs. If you want to build from
+ source, please build from the tags in the Git repository.
+
+## Contributors
+
+This release has contributions from
+Adam Höse,
+Albert Safin,
+Alex Kovar,
+Alex Zero,
+Alexander Bantyev,
+Alexandre Esteves,
+Alyssa Ross,
+Anatole Lucet,
+Anders Kaseorg,
+Andreas Rammhold,
+Antoine Eiche,
+Antoine Martin,
+Arnout Engelen,
+Arthur Gautier,
+aszlig,
+Ben Burdette,
+Benjamin Hipple,
+Bernardo Meurer,
+Björn Gohla,
+Bjørn Forsman,
+Bob van der Linden,
+Brian Leung,
+Brian McKenna,
+Brian Wignall,
+Bruce Toll,
+Bryan Richter,
+Calle Rosenquist,
+Calvin Loncaric,
+Carlo Nucera,
+Carlos D'Agostino,
+Chaz Schlarp,
+Christian Höppner,
+Christian Kampka,
+Chua Hou,
+Chuck,
+Cole Helbling,
+Daiderd Jordan,
+Dan Callahan,
+Dani,
+Daniel Fitzpatrick,
+Danila Fedorin,
+Daniël de Kok,
+Danny Bautista,
+DavHau,
+David McFarland,
+Dima,
+Domen Kožar,
+Dominik Schrempf,
+Dominique Martinet,
+dramforever,
+Dustin DeWeese,
+edef,
+Eelco Dolstra,
+Ellie Hermaszewska,
+Emilio Karakey,
+Emily,
+Eric Culp,
+Ersin Akinci,
+Fabian Möller,
+Farid Zakaria,
+Federico Pellegrin,
+Finn Behrens,
+Florian Franzen,
+Félix Baylac-Jacqué,
+Gabriella Gonzalez,
+Geoff Reedy,
+Georges Dubus,
+Graham Christensen,
+Greg Hale,
+Greg Price,
+Gregor Kleen,
+Gregory Hale,
+Griffin Smith,
+Guillaume Bouchard,
+Harald van Dijk,
+illustris,
+Ivan Zvonimir Horvat,
+Jade,
+Jake Waksbaum,
+jakobrs,
+James Ottaway,
+Jan Tojnar,
+Janne Heß,
+Jaroslavas Pocepko,
+Jarrett Keifer,
+Jeremy Schlatter,
+Joachim Breitner,
+Joe Pea,
+John Ericson,
+Jonathan Ringer,
+Josef Kemetmüller,
+Joseph Lucas,
+Jude Taylor,
+Julian Stecklina,
+Julien Tanguy,
+Jörg Thalheim,
+Kai Wohlfahrt,
+keke,
+Keshav Kini,
+Kevin Quick,
+Kevin Stock,
+Kjetil Orbekk,
+Krzysztof Gogolewski,
+kvtb,
+Lars Mühmel,
+Leonhard Markert,
+Lily Ballard,
+Linus Heckemann,
+Lorenzo Manacorda,
+Lucas Desgouilles,
+Lucas Franceschino,
+Lucas Hoffmann,
+Luke Granger-Brown,
+Madeline Haraj,
+Marwan Aljubeh,
+Mat Marini,
+Mateusz Piotrowski,
+Matthew Bauer,
+Matthew Kenigsberg,
+Mauricio Scheffer,
+Maximilian Bosch,
+Michael Adler,
+Michael Bishop,
+Michael Fellinger,
+Michael Forney,
+Michael Reilly,
+mlatus,
+Mykola Orliuk,
+Nathan van Doorn,
+Naïm Favier,
+ng0,
+Nick Van den Broeck,
+Nicolas Stig124 Formichella,
+Niels Egberts,
+Niklas Hambüchen,
+Nikola Knezevic,
+oxalica,
+p01arst0rm,
+Pamplemousse,
+Patrick Hilhorst,
+Paul Opiyo,
+Pavol Rusnak,
+Peter Kolloch,
+Philipp Bartsch,
+Philipp Middendorf,
+Piotr Szubiakowski,
+Profpatsch,
+Puck Meerburg,
+Ricardo M. Correia,
+Rickard Nilsson,
+Robert Hensing,
+Robin Gloster,
+Rodrigo,
+Rok Garbas,
+Ronnie Ebrin,
+Rovanion Luckey,
+Ryan Burns,
+Ryan Mulligan,
+Ryne Everett,
+Sam Doshi,
+Sam Lidder,
+Samir Talwar,
+Samuel Dionne-Riel,
+Sebastian Ullrich,
+Sergei Trofimovich,
+Sevan Janiyan,
+Shao Cheng,
+Shea Levy,
+Silvan Mosberger,
+Stefan Frijters,
+Stefan Jaax,
+sternenseemann,
+Steven Shaw,
+Stéphan Kochen,
+SuperSandro2000,
+Suraj Barkale,
+Taeer Bar-Yam,
+Thomas Churchman,
+Théophane Hufschmitt,
+Timothy DeHerrera,
+Timothy Klim,
+Tobias Möst,
+Tobias Pflug,
+Tom Bereknyei,
+Travis A. Everett,
+Ujjwal Jain,
+Vladimír Čunát,
+Wil Taylor,
+Will Dietz,
+Yaroslav Bolyukin,
+Yestin L. Harrison,
+YI,
+Yorick van Pelt,
+Yuriy Taraday and
+zimbatm.
diff --git a/doc/manual/src/release-notes/rl-2.5.md b/doc/manual/src/release-notes/rl-2.5.md
new file mode 100644
index 000000000..dd6fd3b0f
--- /dev/null
+++ b/doc/manual/src/release-notes/rl-2.5.md
@@ -0,0 +1,16 @@
+# Release 2.5 (2021-12-13)
+
+* The garbage collector no longer blocks new builds, so the message
+ `waiting for the big garbage collector lock...` is a thing of the
+ past.
+
+* Binary cache stores now have a setting `compression-level`.
+
+* `nix develop` now has a flag `--unpack` to run `unpackPhase`.
+
+* Lists can now be compared lexicographically using the `<` operator.
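+
+  For example (assuming the `nix-command` experimental feature is enabled):
+
+  ```console
+  $ nix eval --expr '[ 1 2 3 ] < [ 1 2 4 ]'
+  true
+  ```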
+
+* New built-in function: `builtins.groupBy`, with the same functionality as
+ Nixpkgs' `lib.groupBy`, but faster.
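+
+  For example:
+
+  ```console
+  $ nix eval --expr 'builtins.groupBy (s: builtins.substring 0 1 s) [ "foo" "bar" "baz" ]'
+  { b = [ "bar" "baz" ]; f = [ "foo" ]; }
+  ```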
+
+* `nix repl` now has a `:log` command.
diff --git a/doc/manual/src/release-notes/rl-2.6.md b/doc/manual/src/release-notes/rl-2.6.md
new file mode 100644
index 000000000..280faead1
--- /dev/null
+++ b/doc/manual/src/release-notes/rl-2.6.md
@@ -0,0 +1,21 @@
+# Release 2.6 (2022-01-24)
+
+* The Nix CLI now searches upwards for a `flake.nix`, up to the root of the
+  current Git repository or a filesystem boundary, rather than looking only in
+  the current directory.
+* The TOML parser used by `builtins.fromTOML` has been replaced by [a
+ more compliant one](https://github.com/ToruNiina/toml11).
+* Added `:st`/`:show-trace` commands to `nix repl`, which are used to
+ set or toggle display of error traces.
+* New builtin function `builtins.zipAttrsWith` with the same
+ functionality as `lib.zipAttrsWith` from Nixpkgs, but much more
+ efficient.
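+
+  For example:
+
+  ```console
+  $ nix eval --expr 'builtins.zipAttrsWith (name: values: values) [ { a = 1; } { a = 2; b = 3; } ]'
+  { a = [ 1 2 ]; b = [ 3 ]; }
+  ```
+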
+* New command `nix store copy-log` to copy build logs from one store
+ to another.
+* The `commit-lockfile-summary` option can be set to a non-empty
+  string to override the commit summary used when committing an updated
+ lockfile. This may be used in conjunction with the `nixConfig`
+ attribute in `flake.nix` to better conform to repository
+ conventions.
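+
+  A sketch of how this might be used from the command line (the option can
+  equally be set via `nixConfig` in `flake.nix`):
+
+  ```console
+  $ nix flake update --commit-lock-file --option commit-lockfile-summary "flake.lock: update inputs"
+  ```
+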
+* `docker run -ti nixos/nix:master` will place you in the Docker
+ container with the latest version of Nix from the `master` branch.
diff --git a/doc/manual/src/release-notes/rl-next.md b/doc/manual/src/release-notes/rl-next.md
new file mode 100644
index 000000000..7dd8387d8
--- /dev/null
+++ b/doc/manual/src/release-notes/rl-next.md
@@ -0,0 +1,28 @@
+# Release X.Y (202?-??-??)
+
+* A number of "default" flake output attributes have been
+ renamed. These are:
+
+  * `defaultPackage.<system>` → `packages.<system>.default`
+  * `defaultApps.<system>` → `apps.<system>.default`
+  * `defaultTemplate` → `templates.default`
+  * `defaultBundler.<system>` → `bundlers.<system>.default`
+  * `overlay` → `overlays.default`
+  * `devShell.<system>` → `devShells.<system>.default`
+
+ The old flake output attributes still work, but `nix flake check`
+ will warn about them.
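+
+  For example, a flake that used to export (a minimal sketch; `hello` is a
+  placeholder attribute):
+
+  ```nix
+  defaultPackage.x86_64-linux = self.packages.x86_64-linux.hello;
+  ```
+
+  would now export:
+
+  ```nix
+  packages.x86_64-linux.default = self.packages.x86_64-linux.hello;
+  ```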
+
+* `nix bundle` has a breaking API change: it now supports bundlers of the form
+  `bundler.<system>.<name> = derivation: another-derivation;`, which makes it
+  possible to inspect evaluation information during bundling. A new
+  [repository](https://github.com/NixOS/bundlers) has various bundlers
+  implemented.
+
+* `nix store ping` now reports the version of the remote Nix daemon.
+
+* `nix flake {init,new}` now display information about which files have been
+ created.
+
+* Templates can now define a `welcomeText` attribute, which is printed out by
+  `nix flake {init,new} --template <template>`.
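+
+  For example, in the flake providing the template (a sketch):
+
+  ```nix
+  templates.default = {
+    path = ./template;
+    description = "A starter project";
+    welcomeText = "Edit flake.nix to get started.";
+  };
+  ```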
diff --git a/docker.nix b/docker.nix
new file mode 100644
index 000000000..251bd2f46
--- /dev/null
+++ b/docker.nix
@@ -0,0 +1,264 @@
+{ pkgs ? import <nixpkgs> { }
+, lib ? pkgs.lib
+, name ? "nix"
+, tag ? "latest"
+, channelName ? "nixpkgs"
+, channelURL ? "https://nixos.org/channels/nixpkgs-unstable"
+}:
+let
+ defaultPkgs = with pkgs; [
+ nix
+ bashInteractive
+ coreutils-full
+ gnutar
+ gzip
+ gnugrep
+ which
+ curl
+ less
+ wget
+ man
+ cacert.out
+ findutils
+ iana-etc
+ git
+ ];
+
+ users = {
+
+ root = {
+ uid = 0;
+ shell = "/bin/bash";
+ home = "/root";
+ gid = 0;
+ };
+
+ } // lib.listToAttrs (
+ map
+ (
+ n: {
+ name = "nixbld${toString n}";
+ value = {
+ uid = 30000 + n;
+ gid = 30000;
+ groups = [ "nixbld" ];
+ description = "Nix build user ${toString n}";
+ };
+ }
+ )
+ (lib.lists.range 1 32)
+ );
+
+ groups = {
+ root.gid = 0;
+ nixbld.gid = 30000;
+ };
+
+ userToPasswd = (
+ k:
+ { uid
+ , gid ? 65534
+ , home ? "/var/empty"
+ , description ? ""
+ , shell ? "/bin/false"
+ , groups ? [ ]
+ }: "${k}:x:${toString uid}:${toString gid}:${description}:${home}:${shell}"
+ );
+ passwdContents = (
+ lib.concatStringsSep "\n"
+ (lib.attrValues (lib.mapAttrs userToPasswd users))
+ );
+
+ userToShadow = k: { ... }: "${k}:!:1::::::";
+ shadowContents = (
+ lib.concatStringsSep "\n"
+ (lib.attrValues (lib.mapAttrs userToShadow users))
+ );
+
+ # Map groups to members
+ # {
+ # group = [ "user1" "user2" ];
+ # }
+ groupMemberMap = (
+ let
+ # Create a flat list of user/group mappings
+ mappings = (
+ builtins.foldl'
+ (
+ acc: user:
+ let
+ groups = users.${user}.groups or [ ];
+ in
+ acc ++ map
+ (group: {
+ inherit user group;
+ })
+ groups
+ )
+ [ ]
+ (lib.attrNames users)
+ );
+ in
+ (
+ builtins.foldl'
+ (
+ acc: v: acc // {
+ ${v.group} = acc.${v.group} or [ ] ++ [ v.user ];
+ }
+ )
+ { }
+ mappings)
+ );
+
+ groupToGroup = k: { gid }:
+ let
+ members = groupMemberMap.${k} or [ ];
+ in
+ "${k}:x:${toString gid}:${lib.concatStringsSep "," members}";
+ groupContents = (
+ lib.concatStringsSep "\n"
+ (lib.attrValues (lib.mapAttrs groupToGroup groups))
+ );
+
+ nixConf = {
+ sandbox = "false";
+ build-users-group = "nixbld";
+ trusted-public-keys = "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=";
+ };
+ nixConfContents = (lib.concatStringsSep "\n" (lib.mapAttrsFlatten (n: v: "${n} = ${v}") nixConf)) + "\n";
+
+ baseSystem =
+ let
+ nixpkgs = pkgs.path;
+ channel = pkgs.runCommand "channel-nixos" { } ''
+ mkdir $out
+ ln -s ${nixpkgs} $out/nixpkgs
+ echo "[]" > $out/manifest.nix
+ '';
+ rootEnv = pkgs.buildPackages.buildEnv {
+ name = "root-profile-env";
+ paths = defaultPkgs;
+ };
+ manifest = pkgs.buildPackages.runCommand "manifest.nix" { } ''
+ cat > $out < $out/etc/passwd
+ echo "" >> $out/etc/passwd
+
+ cat $groupContentsPath > $out/etc/group
+ echo "" >> $out/etc/group
+
+ cat $shadowContentsPath > $out/etc/shadow
+ echo "" >> $out/etc/shadow
+
+ mkdir -p $out/usr
+ ln -s /nix/var/nix/profiles/share $out/usr/
+
+ mkdir -p $out/nix/var/nix/gcroots
+
+ mkdir $out/tmp
+
+ mkdir -p $out/var/tmp
+
+ mkdir -p $out/etc/nix
+ cat $nixConfContentsPath > $out/etc/nix/nix.conf
+
+ mkdir -p $out/root
+ mkdir -p $out/nix/var/nix/profiles/per-user/root
+
+ ln -s ${profile} $out/nix/var/nix/profiles/default-1-link
+ ln -s $out/nix/var/nix/profiles/default-1-link $out/nix/var/nix/profiles/default
+ ln -s /nix/var/nix/profiles/default $out/root/.nix-profile
+
+ ln -s ${channel} $out/nix/var/nix/profiles/per-user/root/channels-1-link
+ ln -s $out/nix/var/nix/profiles/per-user/root/channels-1-link $out/nix/var/nix/profiles/per-user/root/channels
+
+ mkdir -p $out/root/.nix-defexpr
+ ln -s $out/nix/var/nix/profiles/per-user/root/channels $out/root/.nix-defexpr/channels
+ echo "${channelURL} ${channelName}" > $out/root/.nix-channels
+
+ mkdir -p $out/bin $out/usr/bin
+ ln -s ${pkgs.coreutils}/bin/env $out/usr/bin/env
+ ln -s ${pkgs.bashInteractive}/bin/bash $out/bin/sh
+ '';
+
+in
+pkgs.dockerTools.buildLayeredImageWithNixDb {
+
+ inherit name tag;
+
+ contents = [ baseSystem ];
+
+ extraCommands = ''
+ rm -rf nix-support
+ ln -s /nix/var/nix/profiles nix/var/nix/gcroots/profiles
+ '';
+ fakeRootCommands = ''
+ chmod 1777 tmp
+ chmod 1777 var/tmp
+ '';
+
+ config = {
+ Cmd = [ "/root/.nix-profile/bin/bash" ];
+ Env = [
+ "USER=root"
+ "PATH=${lib.concatStringsSep ":" [
+ "/root/.nix-profile/bin"
+ "/nix/var/nix/profiles/default/bin"
+ "/nix/var/nix/profiles/default/sbin"
+ ]}"
+ "MANPATH=${lib.concatStringsSep ":" [
+ "/root/.nix-profile/share/man"
+ "/nix/var/nix/profiles/default/share/man"
+ ]}"
+ "SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
+ "GIT_SSL_CAINFO=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
+ "NIX_SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
+ "NIX_PATH=/nix/var/nix/profiles/per-user/root/channels:/root/.nix-defexpr/channels"
+ ];
+ };
+
+}
diff --git a/flake.lock b/flake.lock
index d562728e7..61eccb73c 100644
--- a/flake.lock
+++ b/flake.lock
@@ -3,27 +3,26 @@
"lowdown-src": {
"flake": false,
"locked": {
- "lastModified": 1628247802,
- "narHash": "sha256-4XSXGYvKqogR7bubyqYNwBHYCtrIn6XRGXj6+u+BXNs=",
+ "lastModified": 1633514407,
+ "narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=",
"owner": "kristapsdz",
"repo": "lowdown",
- "rev": "b4483d0ef85990f54b864158ab786b4a5b3904fa",
+ "rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8",
"type": "github"
},
"original": {
"owner": "kristapsdz",
- "ref": "VERSION_0_8_6",
"repo": "lowdown",
"type": "github"
}
},
"nixpkgs": {
"locked": {
- "lastModified": 1628689438,
- "narHash": "sha256-YMINW6YmubHZVdliGsAJpnnMYXRrvppv59LgwtnyYhs=",
+ "lastModified": 1632864508,
+ "narHash": "sha256-d127FIvGR41XbVRDPVvozUPQ/uRHbHwvfyKHwEt5xFM=",
"owner": "NixOS",
"repo": "nixpkgs",
- "rev": "f6551e1efa261568c82b76c3a582b2c2ceb1f53f",
+ "rev": "82891b5e2c2359d7e58d08849e4c89511ab94234",
"type": "github"
},
"original": {
@@ -32,10 +31,26 @@
"type": "indirect"
}
},
+ "nixpkgs-regression": {
+ "locked": {
+ "lastModified": 1643052045,
+ "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=",
+ "owner": "NixOS",
+ "repo": "nixpkgs",
+ "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
+ "type": "github"
+ },
+ "original": {
+ "id": "nixpkgs",
+ "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
+ "type": "indirect"
+ }
+ },
"root": {
"inputs": {
"lowdown-src": "lowdown-src",
- "nixpkgs": "nixpkgs"
+ "nixpkgs": "nixpkgs",
+ "nixpkgs-regression": "nixpkgs-regression"
}
}
},
diff --git a/flake.nix b/flake.nix
index 9b737b850..87b00edf4 100644
--- a/flake.nix
+++ b/flake.nix
@@ -2,9 +2,10 @@
description = "The purely functional package manager";
inputs.nixpkgs.url = "nixpkgs/nixos-21.05-small";
- inputs.lowdown-src = { url = "github:kristapsdz/lowdown/VERSION_0_8_6"; flake = false; };
+ inputs.nixpkgs-regression.url = "nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
+ inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
- outputs = { self, nixpkgs, lowdown-src }:
+ outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src }:
let
@@ -22,15 +23,36 @@
crossSystems = [ "armv6l-linux" "armv7l-linux" ];
+ stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" ];
+
forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f system);
+ forAllSystemsAndStdenvs = f: forAllSystems (system:
+ nixpkgs.lib.listToAttrs
+ (map
+ (n:
+ nixpkgs.lib.nameValuePair "${n}Packages" (
+ f system n
+ )) stdenvs
+ )
+ );
+
+ forAllStdenvs = stdenvs: f: nixpkgs.lib.genAttrs stdenvs (stdenv: f stdenv);
# Memoize nixpkgs for different platforms for efficiency.
- nixpkgsFor = forAllSystems (system:
- import nixpkgs {
- inherit system;
- overlays = [ self.overlay ];
- }
- );
+ nixpkgsFor =
+ let stdenvsPackages = forAllSystemsAndStdenvs
+ (system: stdenv:
+ import nixpkgs {
+ inherit system;
+ overlays = [
+ (overlayFor (p: p.${stdenv}))
+ ];
+ }
+ );
+ in
+ # Add the `stdenvPackages` at toplevel, both because these are the ones
+ # we want most of the time and for backwards compatibility
+ forAllSystems (system: stdenvsPackages.${system} // stdenvsPackages.${system}.stdenvPackages);
commonDeps = pkgs: with pkgs; rec {
# Use "busybox-sandbox-shell" if present,
@@ -61,6 +83,7 @@
configureFlags =
lib.optionals stdenv.isLinux [
+ "--with-boost=${boost}/lib"
"--with-sandbox-shell=${sh}/bin/busybox"
"LDFLAGS=-fuse-ld=gold"
];
@@ -70,15 +93,15 @@
[
buildPackages.bison
buildPackages.flex
- (lib.getBin buildPackages.lowdown)
+ (lib.getBin buildPackages.lowdown-nix)
buildPackages.mdbook
buildPackages.autoconf-archive
buildPackages.autoreconfHook
- buildPackages.pkgconfig
+ buildPackages.pkg-config
# Tests
buildPackages.git
- buildPackages.mercurial
+ buildPackages.mercurial # FIXME: remove? only needed for tests
buildPackages.jq
]
++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)];
@@ -89,8 +112,8 @@
openssl sqlite
libarchive
boost
- lowdown
- gmock
+ lowdown-nix
+ gtest
]
++ lib.optionals stdenv.isLinux [libseccomp]
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
@@ -110,6 +133,7 @@
./boehmgc-coroutine-sp-fallback.diff
];
}))
+ nlohmann_json
];
perlDeps =
@@ -118,16 +142,15 @@
];
};
- installScriptFor = systems:
- with nixpkgsFor.x86_64-linux;
+ installScriptFor = systems:
+ with nixpkgsFor.x86_64-linux;
runCommand "installer-script"
{ buildInputs = [ nix ];
}
''
mkdir -p $out/nix-support
- # Converts /nix/store/50p3qk8kka9dl6wyq40vydq945k0j3kv-nix-2.4pre20201102_550e11f/bin/nix
- # To 50p3qk8kka9dl6wyq40vydq945k0j3kv/bin/nix
+ # Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix.
tarballPath() {
# Remove the store prefix
local path=''${1#${builtins.storeDir}/}
@@ -153,13 +176,15 @@
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
'';
- testNixVersions = pkgs: client: daemon: with commonDeps pkgs; pkgs.stdenv.mkDerivation {
+ testNixVersions = pkgs: client: daemon: with commonDeps pkgs; with pkgs.lib; pkgs.stdenv.mkDerivation {
NIX_DAEMON_PACKAGE = daemon;
NIX_CLIENT_PACKAGE = client;
- # Must keep this name short as OSX has a rather strict limit on the
- # socket path length, and this name appears in the path of the
- # nix-daemon socket used in the tests
- name = "nix-tests";
+ name =
+ "nix-tests"
+ + optionalString
+ (versionAtLeast daemon.version "2.4pre20211005" &&
+ versionAtLeast client.version "2.4pre20211005")
+ "-${client.version}-against-${daemon.version}";
inherit version;
src = self;
@@ -182,198 +207,209 @@
installCheckPhase = "make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES";
};
- binaryTarball = buildPackages: nix: pkgs: let
- inherit (pkgs) cacert;
- installerClosureInfo = buildPackages.closureInfo { rootPaths = [ nix cacert ]; };
- in
+ binaryTarball = buildPackages: nix: pkgs:
+ let
+ inherit (pkgs) cacert;
+ installerClosureInfo = buildPackages.closureInfo { rootPaths = [ nix cacert ]; };
+ in
- buildPackages.runCommand "nix-binary-tarball-${version}"
- { #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
- meta.description = "Distribution-independent Nix bootstrap binaries for ${pkgs.system}";
- }
- ''
- cp ${installerClosureInfo}/registration $TMPDIR/reginfo
- cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
- substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \
- --subst-var-by nix ${nix} \
- --subst-var-by cacert ${cacert}
+ buildPackages.runCommand "nix-binary-tarball-${version}"
+ { #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
+ meta.description = "Distribution-independent Nix bootstrap binaries for ${pkgs.system}";
+ }
+ ''
+ cp ${installerClosureInfo}/registration $TMPDIR/reginfo
+ cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
+ substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \
+ --subst-var-by nix ${nix} \
+ --subst-var-by cacert ${cacert}
- substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
- --subst-var-by nix ${nix} \
- --subst-var-by cacert ${cacert}
- substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
- --subst-var-by nix ${nix} \
- --subst-var-by cacert ${cacert}
- substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
- --subst-var-by nix ${nix} \
- --subst-var-by cacert ${cacert}
+ substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
+ --subst-var-by nix ${nix} \
+ --subst-var-by cacert ${cacert}
+ substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
+ --subst-var-by nix ${nix} \
+ --subst-var-by cacert ${cacert}
+ substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
+ --subst-var-by nix ${nix} \
+ --subst-var-by cacert ${cacert}
- if type -p shellcheck; then
- # SC1090: Don't worry about not being able to find
- # $nix/etc/profile.d/nix.sh
- shellcheck --exclude SC1090 $TMPDIR/install
- shellcheck $TMPDIR/create-darwin-volume.sh
- shellcheck $TMPDIR/install-darwin-multi-user.sh
- shellcheck $TMPDIR/install-systemd-multi-user.sh
+ if type -p shellcheck; then
+ # SC1090: Don't worry about not being able to find
+ # $nix/etc/profile.d/nix.sh
+ shellcheck --exclude SC1090 $TMPDIR/install
+ shellcheck $TMPDIR/create-darwin-volume.sh
+ shellcheck $TMPDIR/install-darwin-multi-user.sh
+ shellcheck $TMPDIR/install-systemd-multi-user.sh
- # SC1091: Don't panic about not being able to source
- # /etc/profile
- # SC2002: Ignore "useless cat" "error", when loading
- # .reginfo, as the cat is a much cleaner
- # implementation, even though it is "useless"
- # SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
- # root's home directory
- shellcheck --external-sources \
- --exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
- fi
+ # SC1091: Don't panic about not being able to source
+ # /etc/profile
+ # SC2002: Ignore "useless cat" "error", when loading
+ # .reginfo, as the cat is a much cleaner
+ # implementation, even though it is "useless"
+ # SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
+ # root's home directory
+ shellcheck --external-sources \
+ --exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
+ fi
- chmod +x $TMPDIR/install
- chmod +x $TMPDIR/create-darwin-volume.sh
- chmod +x $TMPDIR/install-darwin-multi-user.sh
- chmod +x $TMPDIR/install-systemd-multi-user.sh
- chmod +x $TMPDIR/install-multi-user
- dir=nix-${version}-${pkgs.system}
- fn=$out/$dir.tar.xz
- mkdir -p $out/nix-support
- echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
- tar cvfJ $fn \
- --owner=0 --group=0 --mode=u+rw,uga+r \
- --absolute-names \
- --hard-dereference \
- --transform "s,$TMPDIR/install,$dir/install," \
- --transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \
- --transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
- --transform "s,$NIX_STORE,$dir/store,S" \
- $TMPDIR/install \
- $TMPDIR/create-darwin-volume.sh \
- $TMPDIR/install-darwin-multi-user.sh \
- $TMPDIR/install-systemd-multi-user.sh \
- $TMPDIR/install-multi-user \
- $TMPDIR/reginfo \
- $(cat ${installerClosureInfo}/store-paths)
- '';
+ chmod +x $TMPDIR/install
+ chmod +x $TMPDIR/create-darwin-volume.sh
+ chmod +x $TMPDIR/install-darwin-multi-user.sh
+ chmod +x $TMPDIR/install-systemd-multi-user.sh
+ chmod +x $TMPDIR/install-multi-user
+ dir=nix-${version}-${pkgs.system}
+ fn=$out/$dir.tar.xz
+ mkdir -p $out/nix-support
+ echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
+ tar cvfJ $fn \
+ --owner=0 --group=0 --mode=u+rw,uga+r \
+ --absolute-names \
+ --hard-dereference \
+ --transform "s,$TMPDIR/install,$dir/install," \
+ --transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \
+ --transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
+ --transform "s,$NIX_STORE,$dir/store,S" \
+ $TMPDIR/install \
+ $TMPDIR/create-darwin-volume.sh \
+ $TMPDIR/install-darwin-multi-user.sh \
+ $TMPDIR/install-systemd-multi-user.sh \
+ $TMPDIR/install-multi-user \
+ $TMPDIR/reginfo \
+ $(cat ${installerClosureInfo}/store-paths)
+ '';
+
+ overlayFor = getStdenv: final: prev:
+ let currentStdenv = getStdenv final; in
+ {
+ nixStable = prev.nix;
+
+ # Forward from the previous stage as we don’t want it to pick the lowdown override
+ nixUnstable = prev.nixUnstable;
+
+ nix = with final; with commonDeps pkgs; currentStdenv.mkDerivation {
+ name = "nix-${version}";
+ inherit version;
+
+ src = self;
+
+ VERSION_SUFFIX = versionSuffix;
+
+ outputs = [ "out" "dev" "doc" ];
+
+ nativeBuildInputs = nativeBuildDeps;
+ buildInputs = buildDeps ++ awsDeps;
+
+ propagatedBuildInputs = propagatedDeps;
+
+ disallowedReferences = [ boost ];
+
+ preConfigure =
+ ''
+ # Copy libboost_context so we don't get all of Boost in our closure.
+ # https://github.com/NixOS/nixpkgs/issues/45462
+ mkdir -p $out/lib
+ cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib
+ rm -f $out/lib/*.a
+ ${lib.optionalString currentStdenv.isLinux ''
+ chmod u+w $out/lib/*.so.*
+ patchelf --set-rpath $out/lib:${currentStdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
+ ''}
+ ${lib.optionalString currentStdenv.isDarwin ''
+ for LIB in $out/lib/*.dylib; do
+ chmod u+w $LIB
+ install_name_tool -id $LIB $LIB
+ done
+ install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib
+ ''}
+ '';
+
+ configureFlags = configureFlags ++
+ [ "--sysconfdir=/etc" ];
+
+ enableParallelBuilding = true;
+
+ makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1";
+
+ doCheck = true;
+
+ installFlags = "sysconfdir=$(out)/etc";
+
+ postInstall = ''
+ mkdir -p $doc/nix-support
+ echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
+ ${lib.optionalString currentStdenv.isDarwin ''
+ install_name_tool \
+ -change ${boost}/lib/libboost_context.dylib \
+ $out/lib/libboost_context.dylib \
+ $out/lib/libnixutil.dylib
+ ''}
+ '';
+
+ doInstallCheck = true;
+ installCheckFlags = "sysconfdir=$(out)/etc";
+
+ separateDebugInfo = true;
+
+ strictDeps = true;
+
+ passthru.perl-bindings = with final; currentStdenv.mkDerivation {
+ name = "nix-perl-${version}";
+
+ src = self;
+
+ nativeBuildInputs =
+ [ buildPackages.autoconf-archive
+ buildPackages.autoreconfHook
+ buildPackages.pkg-config
+ ];
+
+ buildInputs =
+ [ nix
+ curl
+ bzip2
+ xz
+ pkgs.perl
+ boost
+ ]
+ ++ lib.optional (currentStdenv.isLinux || currentStdenv.isDarwin) libsodium
+ ++ lib.optional currentStdenv.isDarwin darwin.apple_sdk.frameworks.Security;
+
+ configureFlags = ''
+ --with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}
+ --with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}
+ '';
+
+ enableParallelBuilding = true;
+
+ postUnpack = "sourceRoot=$sourceRoot/perl";
+ };
+
+ };
+
+ lowdown-nix = with final; currentStdenv.mkDerivation rec {
+ name = "lowdown-0.9.0";
+
+ src = lowdown-src;
+
+ outputs = [ "out" "bin" "dev" ];
+
+ nativeBuildInputs = [ buildPackages.which ];
+
+ configurePhase = ''
+ ${if (currentStdenv.isDarwin && currentStdenv.isAarch64) then "echo \"HAVE_SANDBOX_INIT=false\" > configure.local" else ""}
+ ./configure \
+ PREFIX=${placeholder "dev"} \
+ BINDIR=${placeholder "bin"}/bin
+ '';
+ };
+ };
in {
# A Nixpkgs overlay that overrides the 'nix' and
# 'nix.perl-bindings' packages.
- overlay = final: prev: {
-
- # An older version of Nix to test against when using the daemon.
- # Currently using `nixUnstable` as the stable one doesn't respect
- # `NIX_DAEMON_SOCKET_PATH` which is needed for the tests.
- nixStable = prev.nix;
-
- nix = with final; with commonDeps pkgs; stdenv.mkDerivation {
- name = "nix-${version}";
- inherit version;
-
- src = self;
-
- VERSION_SUFFIX = versionSuffix;
-
- outputs = [ "out" "dev" "doc" ];
-
- nativeBuildInputs = nativeBuildDeps;
- buildInputs = buildDeps ++ awsDeps;
-
- propagatedBuildInputs = propagatedDeps;
-
- preConfigure =
- ''
- # Copy libboost_context so we don't get all of Boost in our closure.
- # https://github.com/NixOS/nixpkgs/issues/45462
- mkdir -p $out/lib
- cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib
- rm -f $out/lib/*.a
- ${lib.optionalString stdenv.isLinux ''
- chmod u+w $out/lib/*.so.*
- patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
- ''}
- '';
-
- configureFlags = configureFlags ++
- [ "--sysconfdir=/etc" ];
-
- enableParallelBuilding = true;
-
- makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1";
-
- doCheck = true;
-
- installFlags = "sysconfdir=$(out)/etc";
-
- postInstall = ''
- mkdir -p $doc/nix-support
- echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
- '';
-
- doInstallCheck = true;
- installCheckFlags = "sysconfdir=$(out)/etc";
-
- separateDebugInfo = true;
-
- strictDeps = true;
-
- passthru.perl-bindings = with final; stdenv.mkDerivation {
- name = "nix-perl-${version}";
-
- src = self;
-
- nativeBuildInputs =
- [ buildPackages.autoconf-archive
- buildPackages.autoreconfHook
- buildPackages.pkgconfig
- ];
-
- buildInputs =
- [ nix
- curl
- bzip2
- xz
- pkgs.perl
- boost
- ]
- ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
- ++ lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security;
-
- configureFlags = ''
- --with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}
- --with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}
- '';
-
- enableParallelBuilding = true;
-
- postUnpack = "sourceRoot=$sourceRoot/perl";
- };
-
- };
-
- lowdown = with final; stdenv.mkDerivation rec {
- name = "lowdown-0.8.6";
-
- /*
- src = fetchurl {
- url = "https://kristaps.bsd.lv/lowdown/snapshots/${name}.tar.gz";
- hash = "sha512-U9WeGoInT9vrawwa57t6u9dEdRge4/P+0wLxmQyOL9nhzOEUU2FRz2Be9H0dCjYE7p2v3vCXIYk40M+jjULATw==";
- };
- */
-
- src = lowdown-src;
-
- outputs = [ "out" "bin" "dev" ];
-
- nativeBuildInputs = [ buildPackages.which ];
-
- configurePhase = ''
- ${if (stdenv.isDarwin && stdenv.isAarch64) then "echo \"HAVE_SANDBOX_INIT=false\" > configure.local" else ""}
- ./configure \
- PREFIX=${placeholder "dev"} \
- BINDIR=${placeholder "bin"}/bin
- '';
- };
-
- };
+ overlay = overlayFor (p: p.stdenv);
hydraJobs = {
@@ -410,6 +446,9 @@
installerScript = installScriptFor [ "x86_64-linux" "i686-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" "armv6l-linux" "armv7l-linux" ];
installerScriptForGHA = installScriptFor [ "x86_64-linux" "x86_64-darwin" "armv6l-linux" "armv7l-linux"];
+ # docker image with Nix inside
+ dockerImage = nixpkgs.lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage);
+
# Line coverage analysis.
coverage =
with nixpkgsFor.x86_64-linux;
@@ -450,12 +489,24 @@
inherit (self) overlay;
};
+ tests.nssPreload = (import ./tests/nss-preload.nix rec {
+ system = "x86_64-linux";
+ inherit nixpkgs;
+ inherit (self) overlay;
+ });
+
tests.githubFlakes = (import ./tests/github-flakes.nix rec {
system = "x86_64-linux";
inherit nixpkgs;
inherit (self) overlay;
});
+ tests.sourcehutFlakes = (import ./tests/sourcehut-flakes.nix rec {
+ system = "x86_64-linux";
+ inherit nixpkgs;
+ inherit (self) overlay;
+ });
+
tests.setuid = nixpkgs.lib.genAttrs
["i686-linux" "x86_64-linux"]
(system:
@@ -464,44 +515,46 @@
inherit (self) overlay;
});
- /*
- # Check whether we can still evaluate all of Nixpkgs.
+ # Make sure that nix-env still produces the exact same result
+ # on a particular version of Nixpkgs.
tests.evalNixpkgs =
- import (nixpkgs + "/pkgs/top-level/make-tarball.nix") {
- # FIXME: fix pkgs/top-level/make-tarball.nix in NixOS to not require a revCount.
- inherit nixpkgs;
- pkgs = nixpkgsFor.x86_64-linux;
- officialRelease = false;
- };
-
- # Check whether we can still evaluate NixOS.
- tests.evalNixOS =
with nixpkgsFor.x86_64-linux;
runCommand "eval-nixos" { buildInputs = [ nix ]; }
''
- export NIX_STATE_DIR=$TMPDIR
-
- nix-instantiate ${nixpkgs}/nixos/release-combined.nix -A tested --dry-run \
- --arg nixpkgs '{ outPath = ${nixpkgs}; revCount = 123; shortRev = "abcdefgh"; }'
-
- touch $out
+ type -p nix-env
+ # Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593.
+ time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
+ [[ $(sha1sum < packages | cut -c1-40) = ff451c521e61e4fe72bdbe2d0ca5d1809affa733 ]]
+ mkdir $out
'';
- */
+
+ metrics.nixpkgs = import "${nixpkgs-regression}/pkgs/top-level/metrics.nix" {
+ pkgs = nixpkgsFor.x86_64-linux;
+ nixpkgs = nixpkgs-regression;
+ };
+
+ installTests = forAllSystems (system:
+ let pkgs = nixpkgsFor.${system}; in
+ pkgs.runCommand "install-tests" {
+ againstSelf = testNixVersions pkgs pkgs.nix pkgs.pkgs.nix;
+ againstCurrentUnstable =
+ # FIXME: temporarily disable this on macOS because of #3605.
+ if system == "x86_64-linux"
+ then testNixVersions pkgs pkgs.nix pkgs.nixUnstable
+ else null;
+ # Disabled because the latest stable version doesn't handle
+ # `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work
+ # againstLatestStable = testNixVersions pkgs pkgs.nix pkgs.nixStable;
+ } "touch $out");
};
checks = forAllSystems (system: {
binaryTarball = self.hydraJobs.binaryTarball.${system};
perlBindings = self.hydraJobs.perlBindings.${system};
- installTests =
- let pkgs = nixpkgsFor.${system}; in
- pkgs.runCommand "install-tests" {
- againstSelf = testNixVersions pkgs pkgs.nix pkgs.pkgs.nix;
- againstCurrentUnstable = testNixVersions pkgs pkgs.nix pkgs.nixUnstable;
- # Disabled because the latest stable version doesn't handle
- # `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work
- # againstLatestStable = testNixVersions pkgs pkgs.nix pkgs.nixStable;
- } "touch $out";
+ installTests = self.hydraJobs.installTests.${system};
+ } // (nixpkgs.lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
+ dockerImage = self.hydraJobs.dockerImage.${system};
});
packages = forAllSystems (system: {
@@ -547,6 +600,20 @@
hardeningDisable = [ "pie" ];
};
+ dockerImage =
+ let
+ pkgs = nixpkgsFor.${system};
+ image = import ./docker.nix { inherit pkgs; tag = version; };
+ in
+ pkgs.runCommand
+ "docker-image-tarball-${version}"
+ { meta.description = "Docker image with Nix for ${system}"; }
+ ''
+ mkdir -p $out/nix-support
+ image=$out/image.tar.gz
+ ln -s ${image} $image
+ echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
+ '';
} // builtins.listToAttrs (map (crossSystem: {
name = "nix-${crossSystem}";
value = let
@@ -586,15 +653,21 @@
doInstallCheck = true;
installCheckFlags = "sysconfdir=$(out)/etc";
};
- }) crossSystems)));
+ }) crossSystems)) // (builtins.listToAttrs (map (stdenvName:
+ nixpkgsFor.${system}.lib.nameValuePair
+ "nix-${stdenvName}"
+ nixpkgsFor.${system}."${stdenvName}Packages".nix
+ ) stdenvs)));
defaultPackage = forAllSystems (system: self.packages.${system}.nix);
- devShell = forAllSystems (system:
+ devShell = forAllSystems (system: self.devShells.${system}.stdenvPackages);
+
+ devShells = forAllSystemsAndStdenvs (system: stdenv:
with nixpkgsFor.${system};
with commonDeps pkgs;
- stdenv.mkDerivation {
+ nixpkgsFor.${system}.${stdenv}.mkDerivation {
name = "nix";
outputs = [ "out" "dev" "doc" ];
@@ -613,6 +686,9 @@
PATH=$prefix/bin:$PATH
unset PYTHONPATH
export MANPATH=$out/share/man:$MANPATH
+
+ # Make bash completion work.
+ XDG_DATA_DIRS+=:$out/share
'';
});
diff --git a/maintainers/upload-release.pl b/maintainers/upload-release.pl
index 66ccf1443..d3ef63db8 100755
--- a/maintainers/upload-release.pl
+++ b/maintainers/upload-release.pl
@@ -19,6 +19,8 @@ my $nixpkgsDir = "/home/eelco/Dev/nixpkgs-pristine";
my $TMPDIR = $ENV{'TMPDIR'} // "/tmp";
+my $isLatest = ($ENV{'IS_LATEST'} // "") eq "1";
+
# FIXME: cut&paste from nixos-channel-scripts.
sub fetch {
my ($url, $type) = @_;
@@ -35,22 +37,29 @@ sub fetch {
my $evalUrl = "https://hydra.nixos.org/eval/$evalId";
my $evalInfo = decode_json(fetch($evalUrl, 'application/json'));
#print Dumper($evalInfo);
+my $flakeUrl = $evalInfo->{flake} or die;
+my $flakeInfo = decode_json(`nix flake metadata --json "$flakeUrl"` or die);
+my $nixRev = $flakeInfo->{revision} or die;
-my $nixRev = $evalInfo->{jobsetevalinputs}->{nix}->{revision} or die;
+my $buildInfo = decode_json(fetch("$evalUrl/job/build.x86_64-linux", 'application/json'));
+#print Dumper($buildInfo);
-my $tarballInfo = decode_json(fetch("$evalUrl/job/tarball", 'application/json'));
-
-my $releaseName = $tarballInfo->{releasename};
+my $releaseName = $buildInfo->{nixname};
$releaseName =~ /nix-(.*)$/ or die;
my $version = $1;
-print STDERR "Nix revision is $nixRev, version is $version\n";
+print STDERR "Flake URL is $flakeUrl, Nix revision is $nixRev, version is $version\n";
my $releaseDir = "nix/$releaseName";
my $tmpDir = "$TMPDIR/nix-release/$releaseName";
File::Path::make_path($tmpDir);
+my $narCache = "$TMPDIR/nar-cache";
+File::Path::make_path($narCache);
+
+my $binaryCache = "https://cache.nixos.org/?local-nar-cache=$narCache";
+
# S3 setup.
my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die "No AWS_ACCESS_KEY_ID given.";
my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die "No AWS_SECRET_ACCESS_KEY given.";
@@ -76,6 +85,7 @@ sub downloadFile {
my ($jobName, $productNr, $dstName) = @_;
my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
+ #print STDERR "$jobName: ", Dumper($buildInfo), "\n";
my $srcFile = $buildInfo->{buildproducts}->{$productNr}->{path} or die "job '$jobName' lacks product $productNr\n";
$dstName //= basename($srcFile);
@@ -83,19 +93,27 @@ sub downloadFile {
if (!-e $tmpFile) {
print STDERR "downloading $srcFile to $tmpFile...\n";
- system("NIX_REMOTE=https://cache.nixos.org/ nix store cat '$srcFile' > '$tmpFile'") == 0
+
+ my $fileInfo = decode_json(`NIX_REMOTE=$binaryCache nix store ls --json '$srcFile'`);
+
+ $srcFile = $fileInfo->{target} if $fileInfo->{type} eq 'symlink';
+
+ #print STDERR $srcFile, " ", Dumper($fileInfo), "\n";
+
+ system("NIX_REMOTE=$binaryCache nix store cat '$srcFile' > '$tmpFile'.tmp") == 0
or die "unable to fetch $srcFile\n";
+ rename("$tmpFile.tmp", $tmpFile) or die;
}
- my $sha256_expected = $buildInfo->{buildproducts}->{$productNr}->{sha256hash} or die;
+ my $sha256_expected = $buildInfo->{buildproducts}->{$productNr}->{sha256hash};
my $sha256_actual = `nix hash file --base16 --type sha256 '$tmpFile'`;
chomp $sha256_actual;
- if ($sha256_expected ne $sha256_actual) {
+ if (defined($sha256_expected) && $sha256_expected ne $sha256_actual) {
print STDERR "file $tmpFile is corrupt, got $sha256_actual, expected $sha256_expected\n";
exit 1;
}
- write_file("$tmpFile.sha256", $sha256_expected);
+ write_file("$tmpFile.sha256", $sha256_actual);
if (! -e "$tmpFile.asc") {
system("gpg2 --detach-sign --armor $tmpFile") == 0 or die "unable to sign $tmpFile\n";
@@ -104,8 +122,6 @@ sub downloadFile {
return $sha256_expected;
}
-downloadFile("tarball", "2"); # .tar.bz2
-my $tarballHash = downloadFile("tarball", "3"); # .tar.xz
downloadFile("binaryTarball.i686-linux", "1");
downloadFile("binaryTarball.x86_64-linux", "1");
downloadFile("binaryTarball.aarch64-linux", "1");
@@ -115,6 +131,60 @@ downloadFile("binaryTarballCross.x86_64-linux.armv6l-linux", "1");
downloadFile("binaryTarballCross.x86_64-linux.armv7l-linux", "1");
downloadFile("installerScript", "1");
+# Upload docker images to dockerhub.
+my $dockerManifest = "";
+my $dockerManifestLatest = "";
+
+for my $platforms (["x86_64-linux", "amd64"], ["aarch64-linux", "arm64"]) {
+ my $system = $platforms->[0];
+ my $dockerPlatform = $platforms->[1];
+ my $fn = "nix-$version-docker-image-$dockerPlatform.tar.gz";
+ downloadFile("dockerImage.$system", "1", $fn);
+
+ print STDERR "loading docker image for $dockerPlatform...\n";
+ system("docker load -i $tmpDir/$fn") == 0 or die;
+
+ my $tag = "nixos/nix:$version-$dockerPlatform";
+ my $latestTag = "nixos/nix:latest-$dockerPlatform";
+
+ print STDERR "tagging $version docker image for $dockerPlatform...\n";
+ system("docker tag nix:$version $tag") == 0 or die;
+
+ if ($isLatest) {
+ print STDERR "tagging latest docker image for $dockerPlatform...\n";
+ system("docker tag nix:$version $latestTag") == 0 or die;
+ }
+
+ print STDERR "pushing $version docker image for $dockerPlatform...\n";
+ system("docker push -q $tag") == 0 or die;
+
+ if ($isLatest) {
+ print STDERR "pushing latest docker image for $dockerPlatform...\n";
+ system("docker push -q $latestTag") == 0 or die;
+ }
+
+ $dockerManifest .= " --amend $tag";
+ $dockerManifestLatest .= " --amend $latestTag"
+}
+
+print STDERR "creating multi-platform docker manifest...\n";
+system("docker manifest rm nixos/nix:$version");
+system("docker manifest create nixos/nix:$version $dockerManifest") == 0 or die;
+if ($isLatest) {
+ print STDERR "creating latest multi-platform docker manifest...\n";
+ system("docker manifest rm nixos/nix:latest");
+ system("docker manifest create nixos/nix:latest $dockerManifestLatest") == 0 or die;
+}
+
+print STDERR "pushing multi-platform docker manifest...\n";
+system("docker manifest push nixos/nix:$version") == 0 or die;
+
+if ($isLatest) {
+ print STDERR "pushing latest multi-platform docker manifest...\n";
+ system("docker manifest push nixos/nix:latest") == 0 or die;
+}
+
+# Upload release files to S3.
for my $fn (glob "$tmpDir/*") {
my $name = basename($fn);
my $dstKey = "$releaseDir/" . $name;
@@ -134,42 +204,38 @@ for my $fn (glob "$tmpDir/*") {
}
}
-exit if $version =~ /pre/;
-
# Update nix-fallback-paths.nix.
-system("cd $nixpkgsDir && git pull") == 0 or die;
+if ($isLatest) {
+ system("cd $nixpkgsDir && git pull") == 0 or die;
-sub getStorePath {
- my ($jobName) = @_;
- my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
- for my $product (values %{$buildInfo->{buildproducts}}) {
- next unless $product->{type} eq "nix-build";
- next if $product->{path} =~ /[a-z]+$/;
- return $product->{path};
+ sub getStorePath {
+ my ($jobName) = @_;
+ my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
+        return $buildInfo->{buildoutputs}->{out}->{path} // die "cannot get store path for '$jobName'";
}
- die;
+
+ write_file("$nixpkgsDir/nixos/modules/installer/tools/nix-fallback-paths.nix",
+ "{\n" .
+ " x86_64-linux = \"" . getStorePath("build.x86_64-linux") . "\";\n" .
+ " i686-linux = \"" . getStorePath("build.i686-linux") . "\";\n" .
+ " aarch64-linux = \"" . getStorePath("build.aarch64-linux") . "\";\n" .
+ " x86_64-darwin = \"" . getStorePath("build.x86_64-darwin") . "\";\n" .
+ " aarch64-darwin = \"" . getStorePath("build.aarch64-darwin") . "\";\n" .
+ "}\n");
+
+ system("cd $nixpkgsDir && git commit -a -m 'nix-fallback-paths.nix: Update to $version'") == 0 or die;
}
-write_file("$nixpkgsDir/nixos/modules/installer/tools/nix-fallback-paths.nix",
- "{\n" .
- " x86_64-linux = \"" . getStorePath("build.x86_64-linux") . "\";\n" .
- " i686-linux = \"" . getStorePath("build.i686-linux") . "\";\n" .
- " aarch64-linux = \"" . getStorePath("build.aarch64-linux") . "\";\n" .
- " x86_64-darwin = \"" . getStorePath("build.x86_64-darwin") . "\";\n" .
- " aarch64-darwin = \"" . getStorePath("build.aarch64-darwin") . "\";\n" .
- "}\n");
-
-system("cd $nixpkgsDir && git commit -a -m 'nix-fallback-paths.nix: Update to $version'") == 0 or die;
-
# Update the "latest" symlink.
$channelsBucket->add_key(
"nix-latest/install", "",
{ "x-amz-website-redirect-location" => "https://releases.nixos.org/$releaseDir/install" })
- or die $channelsBucket->err . ": " . $channelsBucket->errstr;
+ or die $channelsBucket->err . ": " . $channelsBucket->errstr
+ if $isLatest;
# Tag the release in Git.
chdir("/home/eelco/Dev/nix-pristine") or die;
system("git remote update origin") == 0 or die;
system("git tag --force --sign $version $nixRev -m 'Tagging release $version'") == 0 or die;
system("git push --tags") == 0 or die;
-system("git push --force-with-lease origin $nixRev:refs/heads/latest-release") == 0 or die;
+system("git push --force-with-lease origin $nixRev:refs/heads/latest-release") == 0 or die if $isLatest;
diff --git a/misc/bash/completion.sh b/misc/bash/completion.sh
index bea2a40bc..045053dee 100644
--- a/misc/bash/completion.sh
+++ b/misc/bash/completion.sh
@@ -7,13 +7,15 @@ function _complete_nix {
local completion=${line%% *}
if [[ -z $have_type ]]; then
have_type=1
- if [[ $completion = filenames ]]; then
+ if [[ $completion == filenames ]]; then
compopt -o filenames
+ elif [[ $completion == attrs ]]; then
+ compopt -o nospace
fi
else
COMPREPLY+=("$completion")
fi
- done < <(NIX_GET_COMPLETIONS=$cword "${words[@]}")
+ done < <(NIX_GET_COMPLETIONS=$cword "${words[@]/#\~/$HOME}" 2>/dev/null)
__ltrim_colon_completions "$cur"
}
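
Both completion tweaks rely on the first line nix emits under NIX_GET_COMPLETIONS, which names the completion type the function branches on (filenames, attrs, or anything else for the default word behaviour). A manual probe might look like the following; the index and output shape are assumptions, not something this patch specifies:

```bash
# Ask nix to complete the word at index 1 of "nix bui", as the bash function would
NIX_GET_COMPLETIONS=1 nix bui 2>/dev/null
```
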
diff --git a/misc/fish/completion.fish b/misc/fish/completion.fish
index bedbefaf8..c6b8ef16a 100644
--- a/misc/fish/completion.fish
+++ b/misc/fish/completion.fish
@@ -19,7 +19,6 @@ end
function _nix_accepts_files
set -l response (_nix_complete)
- # First line is either filenames or no-filenames.
test $response[1] = 'filenames'
end
diff --git a/misc/launchd/org.nixos.nix-daemon.plist.in b/misc/launchd/org.nixos.nix-daemon.plist.in
index f1b439840..da1970f69 100644
--- a/misc/launchd/org.nixos.nix-daemon.plist.in
+++ b/misc/launchd/org.nixos.nix-daemon.plist.in
@@ -25,5 +25,10 @@
     <string>/var/log/nix-daemon.log</string>
     <key>StandardOutPath</key>
     <string>/dev/null</string>
+    <key>SoftResourceLimits</key>
+    <dict>
+      <key>NumberOfFiles</key>
+      <integer>4096</integer>
+    </dict>
   </dict>
 </plist>
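
The new SoftResourceLimits block raises the soft open-files limit for nix-daemon and its children to 4096. It only takes effect once launchd reloads the service; one hedged way to do that on an existing install, assuming the installer's usual plist location:

```bash
sudo launchctl unload /Library/LaunchDaemons/org.nixos.nix-daemon.plist
sudo launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist
```
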
diff --git a/misc/zsh/completion.zsh b/misc/zsh/completion.zsh
index a902e37dc..e702c721e 100644
--- a/misc/zsh/completion.zsh
+++ b/misc/zsh/completion.zsh
@@ -4,7 +4,7 @@ function _nix() {
local ifs_bk="$IFS"
local input=("${(Q)words[@]}")
IFS=$'\n'
- local res=($(NIX_GET_COMPLETIONS=$((CURRENT - 1)) "$input[@]"))
+ local res=($(NIX_GET_COMPLETIONS=$((CURRENT - 1)) "$input[@]" 2>/dev/null))
IFS="$ifs_bk"
local tpe="${${res[1]}%%> *}"
local -a suggestions
diff --git a/mk/libraries.mk b/mk/libraries.mk
index fd4d4ee72..ffd7b5610 100644
--- a/mk/libraries.mk
+++ b/mk/libraries.mk
@@ -91,7 +91,7 @@ define build-library
$(1)_PATH := $$(_d)/$$($(1)_NAME).$(SO_EXT)
$$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/
- $$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED)
+ $$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED)
ifndef HOST_DARWIN
$(1)_LDFLAGS_USE += -Wl,-rpath,$$(abspath $$(_d))
@@ -105,7 +105,7 @@ define build-library
$$(eval $$(call create-dir, $$($(1)_INSTALL_DIR)))
$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
- $$(trace-ld) $(CXX) -o $$@ -shared $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED)
+ $$(trace-ld) $(CXX) -o $$@ -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))
$(1)_LDFLAGS_USE_INSTALLED += -L$$(DESTDIR)$$($(1)_INSTALL_DIR) -l$$(patsubst lib%,%,$$(strip $$($(1)_NAME)))
ifndef HOST_DARWIN
@@ -125,8 +125,8 @@ define build-library
$(1)_PATH := $$(_d)/$$($(1)_NAME).a
$$($(1)_PATH): $$($(1)_OBJS) | $$(_d)/
- $(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$?
- $(trace-ar) $(AR) crs $$@ $$(_d)/$$($(1)_NAME).o
+ $$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$?
+ $$(trace-ar) $(AR) crs $$@ $$(_d)/$$($(1)_NAME).o
$(1)_LDFLAGS_USE += $$($(1)_PATH) $$($(1)_LDFLAGS)
diff --git a/mk/programs.mk b/mk/programs.mk
index 70b09f0dd..d0cf5baf0 100644
--- a/mk/programs.mk
+++ b/mk/programs.mk
@@ -32,7 +32,7 @@ define build-program
$$(eval $$(call create-dir, $$(_d)))
$$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/
- $$(trace-ld) $(CXX) -o $$@ $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS)
+ $$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE))
$(1)_INSTALL_DIR ?= $$(bindir)
@@ -49,7 +49,7 @@ define build-program
_libs_final := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_INSTALL_PATH))
$(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
- $$(trace-ld) $(CXX) -o $$@ $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS)
+ $$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))
else
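
All four link rules above move the per-library _LDFLAGS_USE* flags behind the objects. A common reason for such reordering is that linkers resolve -l arguments left to right (and may drop them entirely under --as-needed), so libraries listed before the objects that need them can be missed. A minimal illustration with made-up object and library names:

```bash
c++ -o nix main.o -lnixmain -lnixexpr   # objects first, then the libraries they pull in
c++ -o nix -lnixmain -lnixexpr main.o   # with --as-needed the libraries may be dropped before use
```
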
diff --git a/mk/tests.mk b/mk/tests.mk
index 21bdc5748..a2e30a378 100644
--- a/mk/tests.mk
+++ b/mk/tests.mk
@@ -13,3 +13,7 @@ define run-install-test
endef
.PHONY: check installcheck
+
+print-top-help += \
+ echo " check: Run unit tests"; \
+ echo " installcheck: Run functional tests";
diff --git a/nix-rust/Cargo.lock b/nix-rust/Cargo.lock
deleted file mode 100644
index 957c01e5a..000000000
--- a/nix-rust/Cargo.lock
+++ /dev/null
@@ -1,399 +0,0 @@
-# This file is automatically @generated by Cargo.
-# It is not intended for manual editing.
-[[package]]
-name = "assert_matches"
-version = "1.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "autocfg"
-version = "0.1.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "bit-set"
-version = "0.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "bit-vec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "bit-vec"
-version = "0.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "bitflags"
-version = "1.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "byteorder"
-version = "1.3.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "c2-chacha"
-version = "0.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "cfg-if"
-version = "0.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "cloudabi"
-version = "0.0.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "fnv"
-version = "1.0.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "fuchsia-cprng"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "getrandom"
-version = "0.1.13"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
- "wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "hex"
-version = "0.3.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "lazy_static"
-version = "1.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "libc"
-version = "0.2.66"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "nix-rust"
-version = "0.1.0"
-dependencies = [
- "assert_matches 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
- "proptest 0.9.4 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "num-traits"
-version = "0.2.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "ppv-lite86"
-version = "0.2.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "proptest"
-version = "0.9.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "bit-set 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-traits 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
- "rusty-fork 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "quick-error"
-version = "1.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "rand"
-version = "0.6.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand"
-version = "0.7.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_chacha"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_chacha"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_core"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_core"
-version = "0.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "rand_core"
-version = "0.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_hc"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_hc"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_isaac"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_jitter"
-version = "0.1.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_os"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_pcg"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_xorshift"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rdrand"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "redox_syscall"
-version = "0.1.56"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "regex-syntax"
-version = "0.6.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "remove_dir_all"
-version = "0.5.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rusty-fork"
-version = "0.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "wait-timeout 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "tempfile"
-version = "3.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)",
- "remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "wait-timeout"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "wasi"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "winapi"
-version = "0.3.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "winapi-i686-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "winapi-x86_64-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[metadata]
-"checksum assert_matches 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7deb0a829ca7bcfaf5da70b073a8d128619259a7be8216a355e23f00763059e5"
-"checksum autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2"
-"checksum bit-set 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e84c238982c4b1e1ee668d136c510c67a13465279c0cb367ea6baf6310620a80"
-"checksum bit-vec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f59bbe95d4e52a6398ec21238d31577f2b28a9d86807f06ca59d191d8440d0bb"
-"checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
-"checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5"
-"checksum c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "214238caa1bf3a496ec3392968969cab8549f96ff30652c9e56885329315f6bb"
-"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
-"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
-"checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3"
-"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
-"checksum getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e7db7ca94ed4cd01190ceee0d8a8052f08a247aa1b469a7f68c6a3b71afcf407"
-"checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77"
-"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
-"checksum libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)" = "d515b1f41455adea1313a4a2ac8a8a477634fbae63cc6100e3aebb207ce61558"
-"checksum num-traits 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c81ffc11c212fa327657cb19dd85eb7419e163b5b076bede2bdb5c974c07e4"
-"checksum ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b"
-"checksum proptest 0.9.4 (registry+https://github.com/rust-lang/crates.io-index)" = "cf147e022eacf0c8a054ab864914a7602618adba841d800a9a9868a5237a529f"
-"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
-"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
-"checksum rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3ae1b169243eaf61759b8475a998f0a385e42042370f3a7dbaf35246eacc8412"
-"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef"
-"checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853"
-"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
-"checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
-"checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
-"checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4"
-"checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
-"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08"
-"checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b"
-"checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071"
-"checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44"
-"checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c"
-"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
-"checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84"
-"checksum regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716"
-"checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
-"checksum rusty-fork 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3dd93264e10c577503e926bd1430193eeb5d21b059148910082245309b424fae"
-"checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
-"checksum wait-timeout 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
-"checksum wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b89c3ce4ce14bdc6fb6beaf9ec7928ca331de5df7e5ea278375642a2f478570d"
-"checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6"
-"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
-"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
diff --git a/nix-rust/Cargo.toml b/nix-rust/Cargo.toml
deleted file mode 100644
index 1372e5a73..000000000
--- a/nix-rust/Cargo.toml
+++ /dev/null
@@ -1,23 +0,0 @@
-[package]
-name = "nix-rust"
-version = "0.1.0"
-authors = ["Eelco Dolstra "]
-edition = "2018"
-
-[lib]
-name = "nixrust"
-crate-type = ["cdylib"]
-
-[dependencies]
-libc = "0.2"
-#futures-preview = { version = "=0.3.0-alpha.19" }
-#hyper = "0.13.0-alpha.4"
-#http = "0.1"
-#tokio = { version = "0.2.0-alpha.6", default-features = false, features = ["rt-full"] }
-lazy_static = "1.4"
-#byteorder = "1.3"
-
-[dev-dependencies]
-hex = "0.3"
-assert_matches = "1.3"
-proptest = "0.9"
diff --git a/nix-rust/local.mk b/nix-rust/local.mk
deleted file mode 100644
index 538244594..000000000
--- a/nix-rust/local.mk
+++ /dev/null
@@ -1,48 +0,0 @@
-ifeq ($(OPTIMIZE), 1)
- RUST_MODE = --release
- RUST_DIR = release
-else
- RUST_MODE =
- RUST_DIR = debug
-endif
-
-libnixrust_PATH := $(d)/target/$(RUST_DIR)/libnixrust.$(SO_EXT)
-libnixrust_INSTALL_PATH := $(libdir)/libnixrust.$(SO_EXT)
-libnixrust_LDFLAGS_USE := -L$(d)/target/$(RUST_DIR) -lnixrust
-libnixrust_LDFLAGS_USE_INSTALLED := -L$(libdir) -lnixrust
-
-ifdef HOST_LINUX
-libnixrust_LDFLAGS_USE += -ldl
-libnixrust_LDFLAGS_USE_INSTALLED += -ldl
-endif
-
-ifdef HOST_DARWIN
-libnixrust_BUILD_FLAGS = NIX_LDFLAGS="-undefined dynamic_lookup"
-else
-libnixrust_LDFLAGS_USE += -Wl,-rpath,$(abspath $(d)/target/$(RUST_DIR))
-libnixrust_LDFLAGS_USE_INSTALLED += -Wl,-rpath,$(libdir)
-endif
-
-$(libnixrust_PATH): $(call rwildcard, $(d)/src, *.rs) $(d)/Cargo.toml
- $(trace-gen) cd nix-rust && CARGO_HOME=$$(if [[ -d vendor ]]; then echo vendor; fi) \
- $(libnixrust_BUILD_FLAGS) \
- cargo build $(RUST_MODE) $$(if [[ -d vendor ]]; then echo --offline; fi) \
- && touch target/$(RUST_DIR)/libnixrust.$(SO_EXT)
-
-$(libnixrust_INSTALL_PATH): $(libnixrust_PATH)
- $(target-gen) cp $^ $@
-ifdef HOST_DARWIN
- install_name_tool -id $@ $@
-endif
-
-clean: clean-rust
-
-clean-rust:
- $(suppress) rm -rfv nix-rust/target
-
-ifndef HOST_DARWIN
-check: rust-tests
-
-rust-tests:
- $(trace-test) cd nix-rust && CARGO_HOME=$$(if [[ -d vendor ]]; then echo vendor; fi) cargo test --release $$(if [[ -d vendor ]]; then echo --offline; fi)
-endif
diff --git a/nix-rust/src/c.rs b/nix-rust/src/c.rs
deleted file mode 100644
index c1358545f..000000000
--- a/nix-rust/src/c.rs
+++ /dev/null
@@ -1,77 +0,0 @@
-use super::{error, store::path, store::StorePath, util};
-
-#[no_mangle]
-pub unsafe extern "C" fn ffi_String_new(s: &str, out: *mut String) {
- // FIXME: check whether 's' is valid UTF-8?
- out.write(s.to_string())
-}
-
-#[no_mangle]
-pub unsafe extern "C" fn ffi_String_drop(self_: *mut String) {
- std::ptr::drop_in_place(self_);
-}
-
-#[no_mangle]
-pub extern "C" fn ffi_StorePath_new(
- path: &str,
- store_dir: &str,
-) -> Result<StorePath, error::CppException> {
- StorePath::new(std::path::Path::new(path), std::path::Path::new(store_dir))
- .map_err(|err| err.into())
-}
-
-#[no_mangle]
-pub extern "C" fn ffi_StorePath_new2(
- hash: &[u8; crate::store::path::STORE_PATH_HASH_BYTES],
- name: &str,
-) -> Result<StorePath, error::CppException> {
- StorePath::from_parts(*hash, name).map_err(|err| err.into())
-}
-
-#[no_mangle]
-pub extern "C" fn ffi_StorePath_fromBaseName(
- base_name: &str,
-) -> Result<StorePath, error::CppException> {
- StorePath::new_from_base_name(base_name).map_err(|err| err.into())
-}
-
-#[no_mangle]
-pub unsafe extern "C" fn ffi_StorePath_drop(self_: *mut StorePath) {
- std::ptr::drop_in_place(self_);
-}
-
-#[no_mangle]
-pub extern "C" fn ffi_StorePath_to_string(self_: &StorePath) -> Vec {
- let mut buf = vec![0; path::STORE_PATH_HASH_CHARS + 1 + self_.name.name().len()];
- util::base32::encode_into(self_.hash.hash(), &mut buf[0..path::STORE_PATH_HASH_CHARS]);
- buf[path::STORE_PATH_HASH_CHARS] = b'-';
- buf[path::STORE_PATH_HASH_CHARS + 1..].clone_from_slice(self_.name.name().as_bytes());
- buf
-}
-
-#[no_mangle]
-pub extern "C" fn ffi_StorePath_less_than(a: &StorePath, b: &StorePath) -> bool {
- a < b
-}
-
-#[no_mangle]
-pub extern "C" fn ffi_StorePath_eq(a: &StorePath, b: &StorePath) -> bool {
- a == b
-}
-
-#[no_mangle]
-pub extern "C" fn ffi_StorePath_clone(self_: &StorePath) -> StorePath {
- self_.clone()
-}
-
-#[no_mangle]
-pub extern "C" fn ffi_StorePath_name(self_: &StorePath) -> &str {
- self_.name.name()
-}
-
-#[no_mangle]
-pub extern "C" fn ffi_StorePath_hash_data(
- self_: &StorePath,
-) -> &[u8; crate::store::path::STORE_PATH_HASH_BYTES] {
- self_.hash.hash()
-}
diff --git a/nix-rust/src/error.rs b/nix-rust/src/error.rs
deleted file mode 100644
index bb0c9a933..000000000
--- a/nix-rust/src/error.rs
+++ /dev/null
@@ -1,118 +0,0 @@
-use std::fmt;
-
-#[derive(Debug)]
-pub enum Error {
- InvalidPath(crate::store::StorePath),
- BadStorePath(std::path::PathBuf),
- NotInStore(std::path::PathBuf),
- BadNarInfo,
- BadBase32,
- StorePathNameEmpty,
- StorePathNameTooLong,
- BadStorePathName,
- NarSizeFieldTooBig,
- BadNarString,
- BadNarPadding,
- BadNarVersionMagic,
- MissingNarOpenTag,
- MissingNarCloseTag,
- MissingNarField,
- BadNarField(String),
- BadExecutableField,
- IOError(std::io::Error),
- #[cfg(unused)]
- HttpError(hyper::error::Error),
- Misc(String),
- #[cfg(not(test))]
- Foreign(CppException),
- BadTarFileMemberName(String),
-}
-
-impl From<std::io::Error> for Error {
- fn from(err: std::io::Error) -> Self {
- Error::IOError(err)
- }
-}
-
-#[cfg(unused)]
-impl From<hyper::error::Error> for Error {
- fn from(err: hyper::error::Error) -> Self {
- Error::HttpError(err)
- }
-}
-
-impl fmt::Display for Error {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self {
- Error::InvalidPath(_) => write!(f, "invalid path"),
- Error::BadNarInfo => write!(f, ".narinfo file is corrupt"),
- Error::BadStorePath(path) => write!(f, "path '{}' is not a store path", path.display()),
- Error::NotInStore(path) => {
- write!(f, "path '{}' is not in the Nix store", path.display())
- }
- Error::BadBase32 => write!(f, "invalid base32 string"),
- Error::StorePathNameEmpty => write!(f, "store path name is empty"),
- Error::StorePathNameTooLong => {
- write!(f, "store path name is longer than 211 characters")
- }
- Error::BadStorePathName => write!(f, "store path name contains forbidden character"),
- Error::NarSizeFieldTooBig => write!(f, "size field in NAR is too big"),
- Error::BadNarString => write!(f, "NAR string is not valid UTF-8"),
- Error::BadNarPadding => write!(f, "NAR padding is not zero"),
- Error::BadNarVersionMagic => write!(f, "unsupported NAR version"),
- Error::MissingNarOpenTag => write!(f, "NAR open tag is missing"),
- Error::MissingNarCloseTag => write!(f, "NAR close tag is missing"),
- Error::MissingNarField => write!(f, "expected NAR field is missing"),
- Error::BadNarField(s) => write!(f, "unrecognized NAR field '{}'", s),
- Error::BadExecutableField => write!(f, "bad 'executable' field in NAR"),
- Error::IOError(err) => write!(f, "I/O error: {}", err),
- #[cfg(unused)]
- Error::HttpError(err) => write!(f, "HTTP error: {}", err),
- #[cfg(not(test))]
- Error::Foreign(_) => write!(f, ""), // FIXME
- Error::Misc(s) => write!(f, "{}", s),
- Error::BadTarFileMemberName(s) => {
- write!(f, "tar archive contains illegal file name '{}'", s)
- }
- }
- }
-}
-
-#[cfg(not(test))]
-impl From<Error> for CppException {
- fn from(err: Error) -> Self {
- match err {
- Error::Foreign(ex) => ex,
- _ => CppException::new(&err.to_string()),
- }
- }
-}
-
-#[cfg(not(test))]
-#[repr(C)]
-#[derive(Debug)]
-pub struct CppException(*const libc::c_void); // == std::exception_ptr*
-
-#[cfg(not(test))]
-impl CppException {
- fn new(s: &str) -> Self {
- Self(unsafe { make_error(s) })
- }
-}
-
-#[cfg(not(test))]
-impl Drop for CppException {
- fn drop(&mut self) {
- unsafe {
- destroy_error(self.0);
- }
- }
-}
-
-#[cfg(not(test))]
-extern "C" {
- #[allow(improper_ctypes)] // YOLO
- fn make_error(s: &str) -> *const libc::c_void;
-
- fn destroy_error(exc: *const libc::c_void);
-}
diff --git a/nix-rust/src/lib.rs b/nix-rust/src/lib.rs
deleted file mode 100644
index 101de106f..000000000
--- a/nix-rust/src/lib.rs
+++ /dev/null
@@ -1,10 +0,0 @@
-#[allow(improper_ctypes_definitions)]
-#[cfg(not(test))]
-mod c;
-mod error;
-#[cfg(unused)]
-mod nar;
-mod store;
-mod util;
-
-pub use error::Error;
diff --git a/nix-rust/src/nar.rs b/nix-rust/src/nar.rs
deleted file mode 100644
index cb520935e..000000000
--- a/nix-rust/src/nar.rs
+++ /dev/null
@@ -1,126 +0,0 @@
-use crate::Error;
-use byteorder::{LittleEndian, ReadBytesExt};
-use std::convert::TryFrom;
-use std::io::Read;
-
-pub fn parse<R: Read>(input: &mut R) -> Result<(), Error> {
- if String::read(input)? != NAR_VERSION_MAGIC {
- return Err(Error::BadNarVersionMagic);
- }
-
- parse_file(input)
-}
-
-const NAR_VERSION_MAGIC: &str = "nix-archive-1";
-
-fn parse_file<R: Read>(input: &mut R) -> Result<(), Error> {
- if String::read(input)? != "(" {
- return Err(Error::MissingNarOpenTag);
- }
-
- if String::read(input)? != "type" {
- return Err(Error::MissingNarField);
- }
-
- match String::read(input)?.as_ref() {
- "regular" => {
- let mut _executable = false;
- let mut tag = String::read(input)?;
- if tag == "executable" {
- _executable = true;
- if String::read(input)? != "" {
- return Err(Error::BadExecutableField);
- }
- tag = String::read(input)?;
- }
- if tag != "contents" {
- return Err(Error::MissingNarField);
- }
-            let _contents = Vec::<u8>::read(input)?;
- if String::read(input)? != ")" {
- return Err(Error::MissingNarCloseTag);
- }
- }
- "directory" => loop {
- match String::read(input)?.as_ref() {
- "entry" => {
- if String::read(input)? != "(" {
- return Err(Error::MissingNarOpenTag);
- }
- if String::read(input)? != "name" {
- return Err(Error::MissingNarField);
- }
- let _name = String::read(input)?;
- if String::read(input)? != "node" {
- return Err(Error::MissingNarField);
- }
- parse_file(input)?;
- let tag = String::read(input)?;
- if tag != ")" {
- return Err(Error::MissingNarCloseTag);
- }
- }
- ")" => break,
- s => return Err(Error::BadNarField(s.into())),
- }
- },
- "symlink" => {
- if String::read(input)? != "target" {
- return Err(Error::MissingNarField);
- }
- let _target = String::read(input)?;
- if String::read(input)? != ")" {
- return Err(Error::MissingNarCloseTag);
- }
- }
- s => return Err(Error::BadNarField(s.into())),
- }
-
- Ok(())
-}
-
-trait Deserialize: Sized {
-    fn read<R: Read>(input: &mut R) -> Result<Self, Error>;
-}
-
-impl Deserialize for String {
-    fn read<R: Read>(input: &mut R) -> Result<Self, Error> {
- let buf = Deserialize::read(input)?;
- Ok(String::from_utf8(buf).map_err(|_| Error::BadNarString)?)
- }
-}
-
-impl Deserialize for Vec<u8> {
-    fn read<R: Read>(input: &mut R) -> Result<Self, Error> {
- let n: usize = Deserialize::read(input)?;
- let mut buf = vec![0; n];
- input.read_exact(&mut buf)?;
- skip_padding(input, n)?;
- Ok(buf)
- }
-}
-
-fn skip_padding<R: Read>(input: &mut R, len: usize) -> Result<(), Error> {
- if len % 8 != 0 {
- let mut buf = [0; 8];
- let buf = &mut buf[0..8 - (len % 8)];
- input.read_exact(buf)?;
- if !buf.iter().all(|b| *b == 0) {
- return Err(Error::BadNarPadding);
- }
- }
- Ok(())
-}
-
-impl Deserialize for u64 {
-    fn read<R: Read>(input: &mut R) -> Result<Self, Error> {
-        Ok(input.read_u64::<LittleEndian>()?)
- }
-}
-
-impl Deserialize for usize {
-    fn read<R: Read>(input: &mut R) -> Result<Self, Error> {
- let n: u64 = Deserialize::read(input)?;
- Ok(usize::try_from(n).map_err(|_| Error::NarSizeFieldTooBig)?)
- }
-}
diff --git a/nix-rust/src/store/binary_cache_store.rs b/nix-rust/src/store/binary_cache_store.rs
deleted file mode 100644
index 9e1e88b7c..000000000
--- a/nix-rust/src/store/binary_cache_store.rs
+++ /dev/null
@@ -1,48 +0,0 @@
-use super::{PathInfo, Store, StorePath};
-use crate::Error;
-use hyper::client::Client;
-
-pub struct BinaryCacheStore {
- base_uri: String,
- client: Client,
-}
-
-impl BinaryCacheStore {
- pub fn new(base_uri: String) -> Self {
- Self {
- base_uri,
- client: Client::new(),
- }
- }
-}
-
-impl Store for BinaryCacheStore {
- fn query_path_info(
- &self,
- path: &StorePath,
-    ) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<PathInfo, Error>> + Send>> {
- let uri = format!("{}/{}.narinfo", self.base_uri.clone(), path.hash);
- let path = path.clone();
- let client = self.client.clone();
- let store_dir = self.store_dir().to_string();
-
- Box::pin(async move {
-            let response = client.get(uri.parse::<hyper::Uri>().unwrap()).await?;
-
- if response.status() == hyper::StatusCode::NOT_FOUND
- || response.status() == hyper::StatusCode::FORBIDDEN
- {
- return Err(Error::InvalidPath(path));
- }
-
- let mut body = response.into_body();
-
- let mut bytes = Vec::new();
- while let Some(next) = body.next().await {
- bytes.extend(next?);
- }
-
- PathInfo::parse_nar_info(std::str::from_utf8(&bytes).unwrap(), &store_dir)
- })
- }
-}
diff --git a/nix-rust/src/store/mod.rs b/nix-rust/src/store/mod.rs
deleted file mode 100644
index da972482c..000000000
--- a/nix-rust/src/store/mod.rs
+++ /dev/null
@@ -1,17 +0,0 @@
-pub mod path;
-
-#[cfg(unused)]
-mod binary_cache_store;
-#[cfg(unused)]
-mod path_info;
-#[cfg(unused)]
-mod store;
-
-pub use path::{StorePath, StorePathHash, StorePathName};
-
-#[cfg(unused)]
-pub use binary_cache_store::BinaryCacheStore;
-#[cfg(unused)]
-pub use path_info::PathInfo;
-#[cfg(unused)]
-pub use store::Store;
diff --git a/nix-rust/src/store/path.rs b/nix-rust/src/store/path.rs
deleted file mode 100644
index 99f7a1f18..000000000
--- a/nix-rust/src/store/path.rs
+++ /dev/null
@@ -1,224 +0,0 @@
-use crate::error::Error;
-use crate::util::base32;
-use std::fmt;
-use std::path::Path;
-
-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
-pub struct StorePath {
- pub hash: StorePathHash,
- pub name: StorePathName,
-}
-
-pub const STORE_PATH_HASH_BYTES: usize = 20;
-pub const STORE_PATH_HASH_CHARS: usize = 32;
-
-impl StorePath {
-    pub fn new(path: &Path, store_dir: &Path) -> Result<Self, Error> {
- if path.parent() != Some(store_dir) {
- return Err(Error::NotInStore(path.into()));
- }
- Self::new_from_base_name(
- path.file_name()
- .ok_or_else(|| Error::BadStorePath(path.into()))?
- .to_str()
- .ok_or_else(|| Error::BadStorePath(path.into()))?,
- )
- }
-
-    pub fn from_parts(hash: [u8; STORE_PATH_HASH_BYTES], name: &str) -> Result<Self, Error> {
- Ok(StorePath {
- hash: StorePathHash(hash),
- name: StorePathName::new(name)?,
- })
- }
-
-    pub fn new_from_base_name(base_name: &str) -> Result<Self, Error> {
- if base_name.len() < STORE_PATH_HASH_CHARS + 1
- || base_name.as_bytes()[STORE_PATH_HASH_CHARS] != b'-'
- {
- return Err(Error::BadStorePath(base_name.into()));
- }
-
- Ok(StorePath {
- hash: StorePathHash::new(&base_name[0..STORE_PATH_HASH_CHARS])?,
- name: StorePathName::new(&base_name[STORE_PATH_HASH_CHARS + 1..])?,
- })
- }
-}
-
-impl fmt::Display for StorePath {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(f, "{}-{}", self.hash, self.name)
- }
-}
-
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub struct StorePathHash([u8; STORE_PATH_HASH_BYTES]);
-
-impl StorePathHash {
-    pub fn new(s: &str) -> Result<Self, Error> {
- assert_eq!(s.len(), STORE_PATH_HASH_CHARS);
- let v = base32::decode(s)?;
- assert_eq!(v.len(), STORE_PATH_HASH_BYTES);
- let mut bytes: [u8; 20] = Default::default();
- bytes.copy_from_slice(&v[0..STORE_PATH_HASH_BYTES]);
- Ok(Self(bytes))
- }
-
- pub fn hash(&self) -> &[u8; STORE_PATH_HASH_BYTES] {
- &self.0
- }
-}
-
-impl fmt::Display for StorePathHash {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let mut buf = vec![0; STORE_PATH_HASH_CHARS];
- base32::encode_into(&self.0, &mut buf);
- f.write_str(std::str::from_utf8(&buf).unwrap())
- }
-}
-
-impl Ord for StorePathHash {
- fn cmp(&self, other: &Self) -> std::cmp::Ordering {
- // Historically we've sorted store paths by their base32
- // serialization, but our base32 encodes bytes in reverse
- // order. So compare them in reverse order as well.
- self.0.iter().rev().cmp(other.0.iter().rev())
- }
-}
-
-impl PartialOrd for StorePathHash {
-    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
- Some(self.cmp(other))
- }
-}
-
-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
-pub struct StorePathName(String);
-
-impl StorePathName {
-    pub fn new(s: &str) -> Result<Self, Error> {
- if s.is_empty() {
- return Err(Error::StorePathNameEmpty);
- }
-
- if s.len() > 211 {
- return Err(Error::StorePathNameTooLong);
- }
-
- let is_good_path_name = s.chars().all(|c| {
- c.is_ascii_alphabetic()
- || c.is_ascii_digit()
- || c == '+'
- || c == '-'
- || c == '.'
- || c == '_'
- || c == '?'
- || c == '='
- });
- if s.starts_with('.') || !is_good_path_name {
- return Err(Error::BadStorePathName);
- }
-
- Ok(Self(s.to_string()))
- }
-
- pub fn name(&self) -> &str {
- &self.0
- }
-}
-
-impl fmt::Display for StorePathName {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.write_str(&self.0)
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use assert_matches::assert_matches;
-
- #[test]
- fn test_parse() {
- let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-konsole-18.12.3";
- let p = StorePath::new_from_base_name(&s).unwrap();
- assert_eq!(p.name.0, "konsole-18.12.3");
- assert_eq!(
- p.hash.0,
- [
- 0x9f, 0x76, 0x49, 0x20, 0xf6, 0x5d, 0xe9, 0x71, 0xc4, 0xca, 0x46, 0x21, 0xab, 0xff,
- 0x9b, 0x44, 0xef, 0x87, 0x0f, 0x3c
- ]
- );
- }
-
- #[test]
- fn test_no_name() {
- let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-";
- assert_matches!(
- StorePath::new_from_base_name(&s),
- Err(Error::StorePathNameEmpty)
- );
- }
-
- #[test]
- fn test_no_dash() {
- let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz";
- assert_matches!(
- StorePath::new_from_base_name(&s),
- Err(Error::BadStorePath(_))
- );
- }
-
- #[test]
- fn test_short_hash() {
- let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxl-konsole-18.12.3";
- assert_matches!(
- StorePath::new_from_base_name(&s),
- Err(Error::BadStorePath(_))
- );
- }
-
- #[test]
- fn test_invalid_hash() {
- let s = "7h7qgvs4kgzsn8e6rb273saxyqh4jxlz-konsole-18.12.3";
- assert_matches!(StorePath::new_from_base_name(&s), Err(Error::BadBase32));
- }
-
- #[test]
- fn test_long_name() {
- let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
- assert_matches!(StorePath::new_from_base_name(&s), Ok(_));
- }
-
- #[test]
- fn test_too_long_name() {
- let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
- assert_matches!(
- StorePath::new_from_base_name(&s),
- Err(Error::StorePathNameTooLong)
- );
- }
-
- #[test]
- fn test_bad_name() {
- let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-foo bar";
- assert_matches!(
- StorePath::new_from_base_name(&s),
- Err(Error::BadStorePathName)
- );
-
- let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-kónsole";
- assert_matches!(
- StorePath::new_from_base_name(&s),
- Err(Error::BadStorePathName)
- );
- }
-
- #[test]
- fn test_roundtrip() {
- let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-konsole-18.12.3";
- assert_eq!(StorePath::new_from_base_name(&s).unwrap().to_string(), s);
- }
-}
diff --git a/nix-rust/src/store/path_info.rs b/nix-rust/src/store/path_info.rs
deleted file mode 100644
index c2903ed29..000000000
--- a/nix-rust/src/store/path_info.rs
+++ /dev/null
@@ -1,70 +0,0 @@
-use crate::store::StorePath;
-use crate::Error;
-use std::collections::BTreeSet;
-
-#[derive(Clone, Debug)]
-pub struct PathInfo {
- pub path: StorePath,
-    pub references: BTreeSet<StorePath>,
- pub nar_size: u64,
-    pub deriver: Option<StorePath>,
-
- // Additional binary cache info.
-    pub url: Option<String>,
-    pub compression: Option<String>,
-    pub file_size: Option<u64>,
-}
-
-impl PathInfo {
-    pub fn parse_nar_info(nar_info: &str, store_dir: &str) -> Result<Self, Error> {
- let mut path = None;
- let mut references = BTreeSet::new();
- let mut nar_size = None;
- let mut deriver = None;
- let mut url = None;
- let mut compression = None;
- let mut file_size = None;
-
- for line in nar_info.lines() {
- let colon = line.find(':').ok_or(Error::BadNarInfo)?;
-
- let (name, value) = line.split_at(colon);
-
- if !value.starts_with(": ") {
- return Err(Error::BadNarInfo);
- }
-
- let value = &value[2..];
-
- if name == "StorePath" {
- path = Some(StorePath::new(std::path::Path::new(value), store_dir)?);
- } else if name == "NarSize" {
- nar_size = Some(u64::from_str_radix(value, 10).map_err(|_| Error::BadNarInfo)?);
- } else if name == "References" {
- if !value.is_empty() {
- for r in value.split(' ') {
- references.insert(StorePath::new_from_base_name(r)?);
- }
- }
- } else if name == "Deriver" {
- deriver = Some(StorePath::new_from_base_name(value)?);
- } else if name == "URL" {
- url = Some(value.into());
- } else if name == "Compression" {
- compression = Some(value.into());
- } else if name == "FileSize" {
- file_size = Some(u64::from_str_radix(value, 10).map_err(|_| Error::BadNarInfo)?);
- }
- }
-
- Ok(PathInfo {
- path: path.ok_or(Error::BadNarInfo)?,
- references,
- nar_size: nar_size.ok_or(Error::BadNarInfo)?,
- deriver,
- url: Some(url.ok_or(Error::BadNarInfo)?),
- compression,
- file_size,
- })
- }
-}
diff --git a/nix-rust/src/store/store.rs b/nix-rust/src/store/store.rs
deleted file mode 100644
index c33dc4a90..000000000
--- a/nix-rust/src/store/store.rs
+++ /dev/null
@@ -1,53 +0,0 @@
-use super::{PathInfo, StorePath};
-use crate::Error;
-use std::collections::{BTreeMap, BTreeSet};
-use std::path::Path;
-
-pub trait Store: Send + Sync {
- fn store_dir(&self) -> &str {
- "/nix/store"
- }
-
- fn query_path_info(
- &self,
- store_path: &StorePath,
-    ) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<PathInfo, Error>> + Send>>;
-}
-
-impl dyn Store {
-    pub fn parse_store_path(&self, path: &Path) -> Result<StorePath, Error> {
- StorePath::new(path, self.store_dir())
- }
-
- pub async fn compute_path_closure(
- &self,
-        roots: BTreeSet<StorePath>,
-    ) -> Result<BTreeMap<StorePath, PathInfo>, Error> {
- let mut done = BTreeSet::new();
- let mut result = BTreeMap::new();
- let mut pending = vec![];
-
- for root in roots {
- pending.push(self.query_path_info(&root));
- done.insert(root);
- }
-
- while !pending.is_empty() {
- let (info, _, remaining) = futures::future::select_all(pending).await;
- pending = remaining;
-
- let info = info?;
-
- for path in &info.references {
- if !done.contains(path) {
- pending.push(self.query_path_info(&path));
- done.insert(path.clone());
- }
- }
-
- result.insert(info.path.clone(), info);
- }
-
- Ok(result)
- }
-}
diff --git a/nix-rust/src/util/base32.rs b/nix-rust/src/util/base32.rs
deleted file mode 100644
index 7e71dc920..000000000
--- a/nix-rust/src/util/base32.rs
+++ /dev/null
@@ -1,160 +0,0 @@
-use crate::error::Error;
-use lazy_static::lazy_static;
-
-pub fn encoded_len(input_len: usize) -> usize {
- if input_len == 0 {
- 0
- } else {
- (input_len * 8 - 1) / 5 + 1
- }
-}
-
-pub fn decoded_len(input_len: usize) -> usize {
- input_len * 5 / 8
-}
-
-static BASE32_CHARS: &[u8; 32] = &b"0123456789abcdfghijklmnpqrsvwxyz";
-
-lazy_static! {
- static ref BASE32_CHARS_REVERSE: Box<[u8; 256]> = {
- let mut xs = [0xffu8; 256];
- for (n, c) in BASE32_CHARS.iter().enumerate() {
- xs[*c as usize] = n as u8;
- }
- Box::new(xs)
- };
-}
-
-pub fn encode(input: &[u8]) -> String {
- let mut buf = vec![0; encoded_len(input.len())];
- encode_into(input, &mut buf);
- std::str::from_utf8(&buf).unwrap().to_string()
-}
-
-pub fn encode_into(input: &[u8], output: &mut [u8]) {
- let len = encoded_len(input.len());
- assert_eq!(len, output.len());
-
- let mut nr_bits_left: usize = 0;
- let mut bits_left: u16 = 0;
- let mut pos = len;
-
- for b in input {
- bits_left |= (*b as u16) << nr_bits_left;
- nr_bits_left += 8;
- while nr_bits_left > 5 {
- output[pos - 1] = BASE32_CHARS[(bits_left & 0x1f) as usize];
- pos -= 1;
- bits_left >>= 5;
- nr_bits_left -= 5;
- }
- }
-
- if nr_bits_left > 0 {
- output[pos - 1] = BASE32_CHARS[(bits_left & 0x1f) as usize];
- pos -= 1;
- }
-
- assert_eq!(pos, 0);
-}
-
-pub fn decode(input: &str) -> Result<Vec<u8>, crate::Error> {
- let mut res = Vec::with_capacity(decoded_len(input.len()));
-
- let mut nr_bits_left: usize = 0;
- let mut bits_left: u16 = 0;
-
- for c in input.chars().rev() {
- let b = BASE32_CHARS_REVERSE[c as usize];
- if b == 0xff {
- return Err(Error::BadBase32);
- }
- bits_left |= (b as u16) << nr_bits_left;
- nr_bits_left += 5;
- if nr_bits_left >= 8 {
- res.push((bits_left & 0xff) as u8);
- bits_left >>= 8;
- nr_bits_left -= 8;
- }
- }
-
- if nr_bits_left > 0 && bits_left != 0 {
- return Err(Error::BadBase32);
- }
-
- Ok(res)
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use assert_matches::assert_matches;
- use hex;
- use proptest::proptest;
-
- #[test]
- fn test_encode() {
- assert_eq!(encode(&[]), "");
-
- assert_eq!(
- encode(&hex::decode("0839703786356bca59b0f4a32987eb2e6de43ae8").unwrap()),
- "x0xf8v9fxf3jk8zln1cwlsrmhqvp0f88"
- );
-
- assert_eq!(
- encode(
- &hex::decode("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad")
- .unwrap()
- ),
- "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"
- );
-
- assert_eq!(
- encode(
- &hex::decode("ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f")
- .unwrap()
- ),
- "2gs8k559z4rlahfx0y688s49m2vvszylcikrfinm30ly9rak69236nkam5ydvly1ai7xac99vxfc4ii84hawjbk876blyk1jfhkbbyx"
- );
- }
-
- #[test]
- fn test_decode() {
- assert_eq!(hex::encode(decode("").unwrap()), "");
-
- assert_eq!(
- hex::encode(decode("x0xf8v9fxf3jk8zln1cwlsrmhqvp0f88").unwrap()),
- "0839703786356bca59b0f4a32987eb2e6de43ae8"
- );
-
- assert_eq!(
- hex::encode(decode("1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s").unwrap()),
- "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"
- );
-
- assert_eq!(
- hex::encode(decode("2gs8k559z4rlahfx0y688s49m2vvszylcikrfinm30ly9rak69236nkam5ydvly1ai7xac99vxfc4ii84hawjbk876blyk1jfhkbbyx").unwrap()),
- "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f"
- );
-
- assert_matches!(
- decode("xoxf8v9fxf3jk8zln1cwlsrmhqvp0f88"),
- Err(Error::BadBase32)
- );
- assert_matches!(
- decode("2b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"),
- Err(Error::BadBase32)
- );
- assert_matches!(decode("2"), Err(Error::BadBase32));
- assert_matches!(decode("2gs"), Err(Error::BadBase32));
- assert_matches!(decode("2gs8"), Err(Error::BadBase32));
- }
-
- proptest! {
-
- #[test]
-    fn roundtrip(s: Vec<u8>) {
- assert_eq!(s, decode(&encode(&s)).unwrap());
- }
- }
-}
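
The codec deleted here mirrors the C++ base32 that stays in Nix (digits-first alphabet, bytes emitted in reverse), so its test vectors can still be checked from the CLI. A hedged example, assuming a Nix 2.4-era `nix hash`; the 40-hex-digit input is the sha1-sized vector from the deleted tests:

```bash
# Should print the value the deleted Rust test asserts: x0xf8v9fxf3jk8zln1cwlsrmhqvp0f88
nix --experimental-features nix-command hash to-base32 --type sha1 \
    0839703786356bca59b0f4a32987eb2e6de43ae8
```
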
diff --git a/nix-rust/src/util/mod.rs b/nix-rust/src/util/mod.rs
deleted file mode 100644
index eaad9d406..000000000
--- a/nix-rust/src/util/mod.rs
+++ /dev/null
@@ -1 +0,0 @@
-pub mod base32;
diff --git a/perl/configure.ac b/perl/configure.ac
index eb65ac17b..a02cb06c9 100644
--- a/perl/configure.ac
+++ b/perl/configure.ac
@@ -41,7 +41,7 @@ perlarchname=$($perl -e 'use Config; print $Config{archname};')
AC_SUBST(perllibdir, [${libdir}/perl5/site_perl/$perlversion/$perlarchname])
AC_MSG_RESULT($perllibdir)
-# Look for libsodium, an optional dependency.
+# Look for libsodium.
PKG_CHECK_MODULES([SODIUM], [libsodium], [CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"])
# Check for the required Perl dependencies (DBI and DBD::SQLite).
diff --git a/perl/lib/Nix/Config.pm.in b/perl/lib/Nix/Config.pm.in
index f7c6f2484..508a15e15 100644
--- a/perl/lib/Nix/Config.pm.in
+++ b/perl/lib/Nix/Config.pm.in
@@ -1,6 +1,7 @@
package Nix::Config;
use MIME::Base64;
+use Nix::Store;
$version = "@PACKAGE_VERSION@";
diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs
index edbf12f7c..54ad1680c 100644
--- a/perl/lib/Nix/Store.xs
+++ b/perl/lib/Nix/Store.xs
@@ -240,7 +240,7 @@ SV * convertHash(char * algo, char * s, int toBase32)
PPCODE:
try {
auto h = Hash::parseAny(s, parseHashType(algo));
- string s = h.to_string(toBase32 ? Base32 : Base16, false);
+ auto s = h.to_string(toBase32 ? Base32 : Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
diff --git a/scripts/check-hydra-status.sh b/scripts/check-hydra-status.sh
new file mode 100644
index 000000000..5e2f03429
--- /dev/null
+++ b/scripts/check-hydra-status.sh
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+# set -x
+
+
+# mapfile BUILDS_FOR_LATEST_EVAL < <(
+# curl -H 'Accept: application/json' https://hydra.nixos.org/jobset/nix/master/evals | \
+# jq -r '.evals[0].builds[] | @sh')
+BUILDS_FOR_LATEST_EVAL=$(
+curl -sS -H 'Accept: application/json' https://hydra.nixos.org/jobset/nix/master/evals | \
+ jq -r '.evals[0].builds[]')
+
+someBuildFailed=0
+
+for buildId in $BUILDS_FOR_LATEST_EVAL; do
+ buildInfo=$(curl -sS -H 'Accept: application/json' "https://hydra.nixos.org/build/$buildId")
+
+ finished=$(echo "$buildInfo" | jq -r '.finished')
+
+ if [[ $finished = 0 ]]; then
+ continue
+ fi
+
+ buildStatus=$(echo "$buildInfo" | jq -r '.buildstatus')
+
+ if [[ $buildStatus != 0 ]]; then
+ someBuildFailed=1
+ echo "Job “$(echo "$buildInfo" | jq -r '.job')” failed on hydra: $buildInfo"
+ fi
+done
+
+exit "$someBuildFailed"
diff --git a/scripts/create-darwin-volume.sh b/scripts/create-darwin-volume.sh
index b52232dd3..4bac4b7ba 100755
--- a/scripts/create-darwin-volume.sh
+++ b/scripts/create-darwin-volume.sh
@@ -246,7 +246,8 @@ get_volume_pass() {
verify_volume_pass() {
local volume_special="$1" # (i.e., disk1s7)
local volume_uuid="$2"
- /usr/sbin/diskutil apfs unlockVolume "$volume_special" -verify -stdinpassphrase -user "$volume_uuid"
+ _sudo "to confirm the password actually unlocks the volume" \
+ /usr/sbin/diskutil apfs unlockVolume "$volume_special" -verify -stdinpassphrase -user "$volume_uuid"
}
volume_pass_works() {
@@ -440,7 +441,22 @@ add_nix_vol_fstab_line() {
# shellcheck disable=SC1003,SC2026
local escaped_mountpoint="${NIX_ROOT/ /'\\\'040}"
shift
- EDITOR="/usr/bin/ex" _sudo "to add nix to fstab" "$@" < "$SCRATCH/ex_cleanroom_wrapper" <&2
+
# Note: mount/unmount are late additions to support the right order
# of operations for creating the volume and then baking its uuid into
# other artifacts; not as well-trod wrt to potential errors, race
# conditions, etc.
- /usr/sbin/diskutil mount "$volume_label"
+ _sudo "to mount your Nix volume for encrypting" \
+ /usr/sbin/diskutil mount "$volume_label"
password="$(/usr/bin/xxd -l 32 -p -c 256 /dev/random)"
_sudo "to add your Nix volume's password to Keychain" \
/usr/bin/security -i <&2
- _sudo "to install the Nix volume mounter" /usr/bin/ex "$NIX_VOLUME_MOUNTD_DEST" <&1)" = *"Read-only file system" ]]
+ case "$(/usr/bin/touch / 2>&1)" in
+ *"Read-only file system") # Catalina, Big Sur
+ return 0
+ ;;
+ *"Operation not permitted") # Monterey
+ return 0
+ ;;
+ *)
+ return 1
+ ;;
+ esac
# Avoiding the slow semantic way to get this information (~330ms vs ~8ms)
# unless using touch causes problems. Just in case, that approach is:
@@ -67,7 +78,7 @@ poly_service_installed_check() {
poly_service_uninstall_directions() {
echo "$1. Remove macOS-specific components:"
if should_create_volume && test_nix_volume_mountd_installed; then
- darwin_volume_uninstall_directions
+ nix_volume_mountd_uninstall_directions
fi
if test_nix_daemon_installed; then
nix_daemon_uninstall_directions
@@ -207,7 +218,7 @@ EOF
setup_darwin_volume
fi
- if [ "$(diskutil info -plist /nix | xmllint --xpath "(/plist/dict/key[text()='GlobalPermissionsEnabled'])/following-sibling::*[1]" -)" = "" ]; then
- failure "This script needs a /nix volume with global permissions! This may require running sudo diskutil enableOwnership /nix."
+ if [ "$(/usr/sbin/diskutil info -plist /nix | xmllint --xpath "(/plist/dict/key[text()='GlobalPermissionsEnabled'])/following-sibling::*[1]" -)" = "" ]; then
+ failure "This script needs a /nix volume with global permissions! This may require running sudo /usr/sbin/diskutil enableOwnership /nix."
fi
}
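The case statement introduced above distinguishes a read-only system volume by the error message `touch /` produces, which differs between Catalina/Big Sur and Monterey. A standalone sketch of that probe, assuming the error strings stay as shown in the diff:

    #!/usr/bin/env bash
    # Sketch: detect a read-only root filesystem the way the installer does,
    # by pattern-matching the error output of `touch /`.
    # The error strings are taken from the diff; other macOS releases may differ.

    root_is_read_only() {
        case "$(/usr/bin/touch / 2>&1)" in
            *"Read-only file system"*)   return 0 ;;  # Catalina, Big Sur
            *"Operation not permitted"*) return 0 ;;  # Monterey
            *)                           return 1 ;;
        esac
    }

    if root_is_read_only; then
        echo "/ is read-only; a separate /nix volume is needed"
    else
        echo "/ is writable"
    fi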
diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index 513127a62..d3ed53d09 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -377,6 +377,11 @@ cure_artifacts() {
}
validate_starting_assumptions() {
+ task "Checking for artifacts of previous installs"
+ cat < /dev/null >&2; then
warning </dev/null)" command -vp chown)"
if [[ -z "$get_chr_own" ]]; then
get_chr_own="$(command -v chown)"
fi
- _sudo "to take root ownership of existing Nix store files" \
- "$get_chr_own" -R "root:$NIX_BUILD_GROUP_NAME" "$NIX_ROOT" || true
+
+ if [[ -z "$get_chr_own" ]]; then
+ reminder <&2
+ echo "Note: a multi-user installation is possible. See https://nixos.org/manual/nix/stable/installation/installing-binary.html#multi-user-installation" >&2
fi
case "$(uname -s)" in
@@ -98,7 +98,7 @@ while [ $# -gt 0 ]; do
echo " providing multi-user support and better isolation for local builds."
echo " Both for security and reproducibility, this method is recommended if"
echo " supported on your platform."
- echo " See https://nixos.org/nix/manual/#sect-multi-user-installation"
+ echo " See https://nixos.org/manual/nix/stable/installation/installing-binary.html#multi-user-installation"
echo ""
echo " --no-daemon: Simple, single-user installation that does not require root and is"
echo " trivial to uninstall."
@@ -134,7 +134,7 @@ fi
echo "performing a single-user installation of Nix..." >&2
-if ! [ -e $dest ]; then
+if ! [ -e "$dest" ]; then
cmd="mkdir -m 0755 $dest && chown $USER $dest"
echo "directory $dest does not exist; creating it by running '$cmd' using sudo" >&2
if ! sudo sh -c "$cmd"; then
@@ -143,12 +143,12 @@ if ! [ -e $dest ]; then
fi
fi
-if ! [ -w $dest ]; then
- echo "$0: directory $dest exists, but is not writable by you. This could indicate that another user has already performed a single-user installation of Nix on this system. If you wish to enable multi-user support see https://nixos.org/nix/manual/#ssec-multi-user. If you wish to continue with a single-user install for $USER please run 'chown -R $USER $dest' as root." >&2
+if ! [ -w "$dest" ]; then
+ echo "$0: directory $dest exists, but is not writable by you. This could indicate that another user has already performed a single-user installation of Nix on this system. If you wish to enable multi-user support see https://nixos.org/manual/nix/stable/installation/multi-user.html. If you wish to continue with a single-user install for $USER please run 'chown -R $USER $dest' as root." >&2
exit 1
fi
-mkdir -p $dest/store
+mkdir -p "$dest/store"
printf "copying Nix to %s..." "${dest}/store" >&2
# Insert a newline if no progress is shown.
@@ -189,17 +189,17 @@ fi
# Install an SSL certificate bundle.
if [ -z "$NIX_SSL_CERT_FILE" ] || ! [ -f "$NIX_SSL_CERT_FILE" ]; then
- $nix/bin/nix-env -i "$cacert"
+ "$nix/bin/nix-env" -i "$cacert"
export NIX_SSL_CERT_FILE="$HOME/.nix-profile/etc/ssl/certs/ca-bundle.crt"
fi
# Subscribe the user to the Nixpkgs channel and fetch it.
if [ -z "$NIX_INSTALLER_NO_CHANNEL_ADD" ]; then
- if ! $nix/bin/nix-channel --list | grep -q "^nixpkgs "; then
- $nix/bin/nix-channel --add https://nixos.org/channels/nixpkgs-unstable
+ if ! "$nix/bin/nix-channel" --list | grep -q "^nixpkgs "; then
+ "$nix/bin/nix-channel" --add https://nixos.org/channels/nixpkgs-unstable
fi
if [ -z "$_NIX_INSTALLER_TEST" ]; then
- if ! $nix/bin/nix-channel --update nixpkgs; then
+ if ! "$nix/bin/nix-channel" --update nixpkgs; then
echo "Fetching the nixpkgs channel failed. (Are you offline?)"
echo "To try again later, run \"nix-channel --update nixpkgs\"."
fi
@@ -215,7 +215,7 @@ if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then
if [ -w "$fn" ]; then
if ! grep -q "$p" "$fn"; then
echo "modifying $fn..." >&2
- echo -e "\nif [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn"
+ printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn"
fi
added=1
break
@@ -226,7 +226,7 @@ if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then
if [ -w "$fn" ]; then
if ! grep -q "$p" "$fn"; then
echo "modifying $fn..." >&2
- echo -e "\nif [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn"
+ printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn"
fi
added=1
break
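Replacing `echo -e` with `printf` avoids relying on a non-portable echo flag and keeps the sourced path out of escape-sequence interpretation. A minimal sketch of the appended snippet, with hypothetical stand-ins for the profile file and the nix.sh path the installer would normally compute:

    #!/usr/bin/env bash
    # Sketch: append a guarded `source` line to a shell profile with printf.
    # $fn and $p are placeholder values, not the installer's real variables.
    set -euo pipefail

    fn="$HOME/.profile"
    p="$HOME/.nix-profile/etc/profile.d/nix.sh"

    if [ -w "$fn" ] && ! grep -q "$p" "$fn"; then
        # printf passes $p through %s as plain data, so backslashes survive.
        printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn"
    fi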
diff --git a/scripts/install-systemd-multi-user.sh b/scripts/install-systemd-multi-user.sh
index 81c61b2a0..f4a2dfc5d 100755
--- a/scripts/install-systemd-multi-user.sh
+++ b/scripts/install-systemd-multi-user.sh
@@ -15,7 +15,7 @@ readonly SERVICE_OVERRIDE=${SERVICE_DEST}.d/override.conf
create_systemd_override() {
header "Configuring proxy for the nix-daemon service"
- _sudo "create directory for systemd unit override" mkdir -p "$(dirname $SERVICE_OVERRIDE)"
+ _sudo "create directory for systemd unit override" mkdir -p "$(dirname "$SERVICE_OVERRIDE")"
cat < /dev/null 2>&1; then
- fetch() { wget "$1" -O "$2"; }
-elif command -v curl > /dev/null 2>&1; then
+if command -v curl > /dev/null 2>&1; then
fetch() { curl -L "$1" -o "$2"; }
+elif command -v wget > /dev/null 2>&1; then
+ fetch() { wget "$1" -O "$2"; }
else
oops "you don't have wget or curl installed, which I need to download the binary tarball"
fi
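The install script now prefers curl and only falls back to wget. The pattern generalises to any ordered list of downloaders; the sketch below is an assumption-level illustration rather than the committed code.

    #!/usr/bin/env bash
    # Sketch: pick the first available downloader, preferring curl over wget.
    set -euo pipefail

    oops() { echo "$0: $*" >&2; exit 1; }

    if command -v curl > /dev/null 2>&1; then
        fetch() { curl -L "$1" -o "$2"; }
    elif command -v wget > /dev/null 2>&1; then
        fetch() { wget "$1" -O "$2"; }
    else
        oops "you don't have wget or curl installed, which I need to download the binary tarball"
    fi

    # Example use (URL and destination are placeholders):
    # fetch "https://example.org/nix.tar.xz" "/tmp/nix.tar.xz"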
diff --git a/scripts/local.mk b/scripts/local.mk
index 2a0055852..b8477178e 100644
--- a/scripts/local.mk
+++ b/scripts/local.mk
@@ -1,7 +1,5 @@
nix_noinst_scripts := \
- $(d)/nix-http-export.cgi \
- $(d)/nix-profile.sh \
- $(d)/nix-reduce-build
+ $(d)/nix-profile.sh
noinst-scripts += $(nix_noinst_scripts)
diff --git a/scripts/nix-http-export.cgi.in b/scripts/nix-http-export.cgi.in
deleted file mode 100755
index 19a505af1..000000000
--- a/scripts/nix-http-export.cgi.in
+++ /dev/null
@@ -1,51 +0,0 @@
-#! /bin/sh
-
-export HOME=/tmp
-export NIX_REMOTE=daemon
-
-TMP_DIR="${TMP_DIR:-/tmp/nix-export}"
-
-@coreutils@/mkdir -p "$TMP_DIR" || true
-@coreutils@/chmod a+r "$TMP_DIR"
-
-needed_path="?$QUERY_STRING"
-needed_path="${needed_path#*[?&]needed_path=}"
-needed_path="${needed_path%%&*}"
-#needed_path="$(echo $needed_path | ./unhttp)"
-needed_path="${needed_path//%2B/+}"
-needed_path="${needed_path//%3D/=}"
-
-echo needed_path: "$needed_path" >&2
-
-NIX_STORE="${NIX_STORE_DIR:-/nix/store}"
-
-echo NIX_STORE: "${NIX_STORE}" >&2
-
-full_path="${NIX_STORE}"/"$needed_path"
-
-if [ "$needed_path" != "${needed_path%.drv}" ]; then
- echo "Status: 403 You should create the derivation file yourself"
- echo "Content-Type: text/plain"
- echo
- echo "Refusing to disclose derivation contents"
- exit
-fi
-
-if @bindir@/nix-store --check-validity "$full_path"; then
- if ! [ -e nix-export/"$needed_path".nar.gz ]; then
- @bindir@/nix-store --export "$full_path" | @gzip@ > "$TMP_DIR"/"$needed_path".nar.gz
- @coreutils@/ln -fs "$TMP_DIR"/"$needed_path".nar.gz nix-export/"$needed_path".nar.gz
- fi;
- echo "Status: 301 Moved"
- echo "Location: nix-export/"$needed_path".nar.gz"
- echo
-else
- echo "Status: 404 No such path found"
- echo "Content-Type: text/plain"
- echo
- echo "Path not found:"
- echo "$needed_path"
- echo "checked:"
- echo "$full_path"
-fi
-
diff --git a/scripts/nix-profile-daemon.sh.in b/scripts/nix-profile-daemon.sh.in
index 500a98992..0a47571ac 100644
--- a/scripts/nix-profile-daemon.sh.in
+++ b/scripts/nix-profile-daemon.sh.in
@@ -5,7 +5,7 @@ __ETC_PROFILE_NIX_SOURCED=1
export NIX_PROFILES="@localstatedir@/nix/profiles/default $HOME/.nix-profile"
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
-if [ ! -z "${NIX_SSL_CERT_FILE:-}" ]; then
+if [ -n "${NIX_SSL_CERT_FILE:-}" ]; then
: # Allow users to override the NIX_SSL_CERT_FILE
elif [ -e /etc/ssl/certs/ca-certificates.crt ]; then # NixOS, Ubuntu, Debian, Gentoo, Arch
export NIX_SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
@@ -18,14 +18,14 @@ elif [ -e /etc/pki/tls/certs/ca-bundle.crt ]; then # Fedora, CentOS
else
# Fall back to what is in the nix profiles, favouring whatever is defined last.
check_nix_profiles() {
- if [ "$ZSH_VERSION" ]; then
+ if [ -n "$ZSH_VERSION" ]; then
# Zsh by default doesn't split words in unquoted parameter expansion.
# Set local_options for these options to be reverted at the end of the function
# and shwordsplit to force splitting words in $NIX_PROFILES below.
setopt local_options shwordsplit
fi
for i in $NIX_PROFILES; do
- if [ -e $i/etc/ssl/certs/ca-bundle.crt ]; then
+ if [ -e "$i/etc/ssl/certs/ca-bundle.crt" ]; then
export NIX_SSL_CERT_FILE=$i/etc/ssl/certs/ca-bundle.crt
fi
done
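The loop above must word-split $NIX_PROFILES even under zsh, which is why shwordsplit is enabled locally; the quoting of the `-e` test is the substantive fix. A condensed bash sketch of the certificate-bundle fallback, using the same candidate locations that appear in this file:

    #!/usr/bin/env bash
    # Sketch: pick NIX_SSL_CERT_FILE from the first existing system bundle,
    # then fall back to per-profile bundles (last profile listed wins).
    # The candidate paths mirror those checked in nix-profile-daemon.sh.in.

    if [ -z "${NIX_SSL_CERT_FILE:-}" ]; then
        for candidate in \
            /etc/ssl/certs/ca-certificates.crt \
            /etc/pki/tls/certs/ca-bundle.crt; do
            if [ -e "$candidate" ]; then
                export NIX_SSL_CERT_FILE=$candidate
                break
            fi
        done
    fi

    if [ -z "${NIX_SSL_CERT_FILE:-}" ]; then
        for i in ${NIX_PROFILES:-}; do
            if [ -e "$i/etc/ssl/certs/ca-bundle.crt" ]; then
                export NIX_SSL_CERT_FILE=$i/etc/ssl/certs/ca-bundle.crt
            fi
        done
    fi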
diff --git a/scripts/nix-profile.sh.in b/scripts/nix-profile.sh.in
index 8cba1c522..45cbcbe74 100644
--- a/scripts/nix-profile.sh.in
+++ b/scripts/nix-profile.sh.in
@@ -24,6 +24,9 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then
export NIX_SSL_CERT_FILE="$NIX_LINK/etc/ca-bundle.crt"
fi
+    # Only use MANPATH if it is already set. In general `man` will simply
+    # pick up `.nix-profile/share/man` because it is close to `.nix-profile/bin`,
+    # which is in the $PATH. For more info, run `manpath -d`.
if [ -n "${MANPATH-}" ]; then
export MANPATH="$NIX_LINK/share/man:$MANPATH"
fi
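As the new comment notes, `man` usually finds `.nix-profile/share/man` on its own because the adjacent `bin` directory is already in $PATH, so MANPATH is only prepended when the user has set it. A tiny sketch of that guard; NIX_LINK is a placeholder for the profile symlink the real script computes.

    #!/usr/bin/env bash
    # Sketch: only touch MANPATH when the user already maintains one.

    NIX_LINK="$HOME/.nix-profile"

    if [ -n "${MANPATH-}" ]; then
        # An explicit MANPATH overrides man's path-based discovery, so the
        # Nix man directory has to be added by hand.
        export MANPATH="$NIX_LINK/share/man:$MANPATH"
    fi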
diff --git a/scripts/nix-reduce-build.in b/scripts/nix-reduce-build.in
deleted file mode 100755
index 50beb9d10..000000000
--- a/scripts/nix-reduce-build.in
+++ /dev/null
@@ -1,171 +0,0 @@
-#! @bash@
-
-WORKING_DIRECTORY=$(mktemp -d "${TMPDIR:-/tmp}"/nix-reduce-build-XXXXXX);
-cd "$WORKING_DIRECTORY";
-
-if test -z "$1" || test "a--help" = "a$1" ; then
- echo 'nix-reduce-build (paths or Nix expressions) -- (package sources)' >&2
- echo As in: >&2
- echo nix-reduce-build /etc/nixos/nixos -- ssh://user@somewhere.nowhere.example.org >&2
- echo nix-reduce-build /etc/nixos/nixos -- \\
- echo " " \''http://somewhere.nowhere.example.org/nix/nix-http-export.cgi?needed_path='\' >&2
- echo " store path name will be added into the end of the URL" >&2
- echo nix-reduce-build /etc/nixos/nixos -- file://home/user/nar/ >&2
- echo " that should be a directory where gzipped 'nix-store --export' ">&2
- echo " files are located (they should have .nar.gz extension)" >&2
- echo " Or all together: " >&2
- echo -e nix-reduce-build /expr.nix /e2.nix -- \\\\\\\n\
- " ssh://a@b.example.com http://n.example.com/get-nar?q= file://nar/" >&2
- echo " Also supports best-effort local builds of failing expression set:" >&2
- echo "nix-reduce-build /e.nix -- nix-daemon:// nix-self://" >&2
- echo " nix-daemon:// builds using daemon"
- echo " nix-self:// builds directly using nix-store from current installation" >&2
- echo " nix-daemon-fixed:// and nix-self-fixed:// do the same, but only for" >&2;
- echo "derivations with specified output hash (sha256, sha1 or md5)." >&2
- echo " nix-daemon-substitute:// and nix-self-substitute:// try to substitute" >&2;
- echo "maximum amount of paths" >&2;
- echo " nix-daemon-build:// and nix-self-build:// try to build (not substitute)" >&2;
- echo "maximum amount of paths" >&2;
- echo " If no package sources are specified, required paths are listed." >&2;
- exit;
-fi;
-
-while ! test "$1" = "--" || test "$1" = "" ; do
- echo "$1" >> initial; >&2
- shift;
-done
-shift;
-echo Will work on $(cat initial | wc -l) targets. >&2
-
-while read ; do
- case "$REPLY" in
- ${NIX_STORE_DIR:-/nix/store}/*)
- echo "$REPLY" >> paths; >&2
- ;;
- *)
- (
- IFS=: ;
- nix-instantiate $REPLY >> paths;
- );
- ;;
- esac;
-done < initial;
-echo Proceeding $(cat paths | wc -l) paths. >&2
-
-while read; do
- case "$REPLY" in
- *.drv)
- echo "$REPLY" >> derivers; >&2
- ;;
- *)
- nix-store --query --deriver "$REPLY" >>derivers;
- ;;
- esac;
-done < paths;
-echo Found $(cat derivers | wc -l) derivers. >&2
-
-cat derivers | xargs nix-store --query -R > derivers-closure;
-echo Proceeding at most $(cat derivers-closure | wc -l) derivers. >&2
-
-cat derivers-closure | egrep '[.]drv$' | xargs nix-store --query --outputs > wanted-paths;
-cat derivers-closure | egrep -v '[.]drv$' >> wanted-paths;
-echo Prepared $(cat wanted-paths | wc -l) paths to get. >&2
-
-cat wanted-paths | xargs nix-store --check-validity --print-invalid > needed-paths;
-echo We need $(cat needed-paths | wc -l) paths. >&2
-
-egrep '[.]drv$' derivers-closure > critical-derivers;
-
-if test -z "$1" ; then
- cat needed-paths;
-fi;
-
-refresh_critical_derivers() {
- echo "Finding needed derivers..." >&2;
- cat critical-derivers | while read; do
- if ! (nix-store --query --outputs "$REPLY" | xargs nix-store --check-validity &> /dev/null;); then
- echo "$REPLY";
- fi;
- done > new-critical-derivers;
- mv new-critical-derivers critical-derivers;
- echo The needed paths are realized by $(cat critical-derivers | wc -l) derivers. >&2
-}
-
-build_here() {
- cat critical-derivers | while read; do
- echo "Realising $REPLY using nix-daemon" >&2
- @bindir@/nix-store -r "${REPLY}"
- done;
-}
-
-try_to_substitute(){
- cat needed-paths | while read ; do
- echo "Building $REPLY using nix-daemon" >&2
- @bindir@/nix-store -r "${NIX_STORE_DIR:-/nix/store}/${REPLY##*/}"
- done;
-}
-
-for i in "$@"; do
- sshHost="${i#ssh://}";
- httpHost="${i#http://}";
- httpsHost="${i#https://}";
- filePath="${i#file:/}";
- if [ "$i" != "$sshHost" ]; then
- cat needed-paths | while read; do
- echo "Getting $REPLY and its closure over ssh" >&2
- nix-copy-closure --from "$sshHost" --gzip "$REPLY" &2
- curl ${BAD_CERTIFICATE:+-k} -L "$i${REPLY##*/}" | gunzip | nix-store --import;
- done;
- elif [ "$i" != "$filePath" ] ; then
- cat needed-paths | while read; do
- echo "Installing $REPLY from file" >&2
- gunzip < "$filePath/${REPLY##*/}".nar.gz | nix-store --import;
- done;
- elif [ "$i" = "nix-daemon://" ] ; then
- NIX_REMOTE=daemon try_to_substitute;
- refresh_critical_derivers;
- NIX_REMOTE=daemon build_here;
- elif [ "$i" = "nix-self://" ] ; then
- NIX_REMOTE= try_to_substitute;
- refresh_critical_derivers;
- NIX_REMOTE= build_here;
- elif [ "$i" = "nix-daemon-fixed://" ] ; then
- refresh_critical_derivers;
-
- cat critical-derivers | while read; do
- if egrep '"(md5|sha1|sha256)"' "$REPLY" &>/dev/null; then
- echo "Realising $REPLY using nix-daemon" >&2
- NIX_REMOTE=daemon @bindir@/nix-store -r "${REPLY}"
- fi;
- done;
- elif [ "$i" = "nix-self-fixed://" ] ; then
- refresh_critical_derivers;
-
- cat critical-derivers | while read; do
- if egrep '"(md5|sha1|sha256)"' "$REPLY" &>/dev/null; then
- echo "Realising $REPLY using direct Nix build" >&2
- NIX_REMOTE= @bindir@/nix-store -r "${REPLY}"
- fi;
- done;
- elif [ "$i" = "nix-daemon-substitute://" ] ; then
- NIX_REMOTE=daemon try_to_substitute;
- elif [ "$i" = "nix-self-substitute://" ] ; then
- NIX_REMOTE= try_to_substitute;
- elif [ "$i" = "nix-daemon-build://" ] ; then
- refresh_critical_derivers;
- NIX_REMOTE=daemon build_here;
- elif [ "$i" = "nix-self-build://" ] ; then
- refresh_critical_derivers;
- NIX_REMOTE= build_here;
- fi;
- mv needed-paths wanted-paths;
- cat wanted-paths | xargs nix-store --check-validity --print-invalid > needed-paths;
- echo We still need $(cat needed-paths | wc -l) paths. >&2
-done;
-
-cd /
-rm -r "$WORKING_DIRECTORY"
diff --git a/scripts/prepare-installer-for-github-actions b/scripts/prepare-installer-for-github-actions
index 92d930384..4b994a753 100755
--- a/scripts/prepare-installer-for-github-actions
+++ b/scripts/prepare-installer-for-github-actions
@@ -3,7 +3,7 @@
set -e
script=$(nix-build -A outputs.hydraJobs.installerScriptForGHA --no-out-link)
-installerHash=$(echo $script | cut -b12-43 -)
+installerHash=$(echo "$script" | cut -b12-43 -)
installerURL=https://$CACHIX_NAME.cachix.org/serve/$installerHash/install
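Quoting "$script" keeps the `echo | cut` pipeline from mangling a path containing whitespace. An alternative that avoids the pipeline entirely uses parameter expansion; the store path below is a fabricated placeholder, not an output of the real build.

    #!/usr/bin/env bash
    # Sketch: extract the 32-character store hash from a store path with
    # parameter expansion instead of `echo | cut`. The path is made up.
    set -euo pipefail

    script=/nix/store/abcdefghijklmnopqrstuvwxyz012345-installer-script/install

    name=${script#/nix/store/}   # strip the /nix/store/ prefix
    installerHash=${name%%-*}    # keep everything before the first dash
    echo "$installerHash"        # -> abcdefghijklmnopqrstuvwxyz012345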
diff --git a/shell.nix b/shell.nix
index 330df0ab6..918f4bbd9 100644
--- a/shell.nix
+++ b/shell.nix
@@ -1,3 +1,3 @@
-(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
+(import (fetchTarball "https://github.com/edolstra/flake-compat/archive/master.tar.gz") {
src = ./.;
}).shellNix
diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc
index 0559aeaf4..9d2eacb54 100644
--- a/src/build-remote/build-remote.cc
+++ b/src/build-remote/build-remote.cc
@@ -18,6 +18,7 @@
#include "derivations.hh"
#include "local-store.hh"
#include "legacy.hh"
+#include "experimental-features.hh"
using namespace nix;
using std::cin;
@@ -31,7 +32,7 @@ std::string escapeUri(std::string uri)
return uri;
}
-static string currentLoad;
+static std::string currentLoad;
static AutoCloseFD openSlotLock(const Machine & m, uint64_t slot)
{
@@ -96,7 +97,7 @@ static int main_build_remote(int argc, char * * argv)
}
     std::optional<StorePath> drvPath;
- string storeUri;
+ std::string storeUri;
while (true) {
@@ -130,11 +131,14 @@ static int main_build_remote(int argc, char * * argv)
for (auto & m : machines) {
debug("considering building on remote machine '%s'", m.storeUri);
- if (m.enabled && std::find(m.systemTypes.begin(),
- m.systemTypes.end(),
- neededSystem) != m.systemTypes.end() &&
+ if (m.enabled
+ && (neededSystem == "builtin"
+ || std::find(m.systemTypes.begin(),
+ m.systemTypes.end(),
+ neededSystem) != m.systemTypes.end()) &&
m.allSupported(requiredFeatures) &&
- m.mandatoryMet(requiredFeatures)) {
+ m.mandatoryMet(requiredFeatures))
+ {
rightType = true;
AutoCloseFD free;
uint64_t load = 0;
@@ -179,7 +183,7 @@ static int main_build_remote(int argc, char * * argv)
else
{
// build the hint template.
- string errorText =
+ std::string errorText =
"Failed to find a machine for remote build!\n"
"derivation: %s\nrequired (system, features): (%s, %s)";
errorText += "\n%s available machines:";
@@ -189,7 +193,7 @@ static int main_build_remote(int argc, char * * argv)
errorText += "\n(%s, %s, %s, %s)";
// add the template values.
- string drvstr;
+ std::string drvstr;
if (drvPath.has_value())
drvstr = drvPath->to_string();
else
@@ -204,7 +208,7 @@ static int main_build_remote(int argc, char * * argv)
for (auto & m : machines)
error
-                % concatStringsSep<vector<string>>(", ", m.systemTypes)
+                % concatStringsSep<std::vector<std::string>>(", ", m.systemTypes)
% m.maxJobs
% concatStringsSep(", ", m.supportedFeatures)
% concatStringsSep(", ", m.mandatoryFeatures);
@@ -295,7 +299,7 @@ connected:
     std::set<Realisation> missingRealisations;
StorePathSet missingPaths;
- if (settings.isExperimentalFeatureEnabled("ca-derivations") && !derivationHasKnownOutputPaths(drv.type())) {
+ if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) && !derivationHasKnownOutputPaths(drv.type())) {
for (auto & outputName : wantedOutputs) {
auto thisOutputHash = outputHashes.at(outputName);
auto thisOutputId = DrvOutput{ thisOutputHash, outputName };
@@ -327,7 +331,7 @@ connected:
for (auto & realisation : missingRealisations) {
// Should hold, because if the feature isn't enabled the set
// of missing realisations should be empty
- settings.requireExperimentalFeature("ca-derivations");
+ settings.requireExperimentalFeature(Xp::CaDerivations);
store->registerDrvOutput(realisation);
}
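The C++ change replaces the stringly-typed feature check with the Xp::CaDerivations enum value; from the command line and nix.conf the feature is still spelled `ca-derivations`. A hedged sketch of enabling it, both per invocation and persistently (the flake reference is a placeholder):

    #!/usr/bin/env bash
    # Sketch: enable the ca-derivations experimental feature that the
    # build-remote change gates on.

    # One-off, for a single invocation:
    nix --experimental-features 'nix-command flakes ca-derivations' \
        build 'nixpkgs#hello'

    # Persistently, via the user's nix.conf (appends; adjust to taste):
    mkdir -p ~/.config/nix
    echo 'experimental-features = nix-command flakes ca-derivations' \
        >> ~/.config/nix/nix.conf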
diff --git a/src/cpptoml/LICENSE b/src/cpptoml/LICENSE
deleted file mode 100644
index 8802c4fa5..000000000
--- a/src/cpptoml/LICENSE
+++ /dev/null
@@ -1,18 +0,0 @@
-Copyright (c) 2014 Chase Geigle
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/src/cpptoml/cpptoml.h b/src/cpptoml/cpptoml.h
deleted file mode 100644
index 5a00da3b4..000000000
--- a/src/cpptoml/cpptoml.h
+++ /dev/null
@@ -1,3668 +0,0 @@
-/**
- * @file cpptoml.h
- * @author Chase Geigle
- * @date May 2013
- */
-
-#ifndef CPPTOML_H
-#define CPPTOML_H
-
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include