Mirror of https://github.com/privatevoid-net/nix-super.git, synced 2024-11-22 22:16:16 +02:00

Merge remote-tracking branch 'upstream/master' into indexed-store-path-outputs

Commit 6cafe308c9: 100 changed files with 1507 additions and 766 deletions
.github/stale.yml (vendored, 9 changed lines)

@@ -1,10 +1,9 @@
 # Configuration for probot-stale - https://github.com/probot/stale
 daysUntilStale: 180
-daysUntilClose: 365
+daysUntilClose: false
 exemptLabels:
   - "critical"
+  - "never-stale"
 staleLabel: "stale"
-markComment: |
-  I marked this as stale due to inactivity. → [More info](https://github.com/NixOS/nix/blob/master/.github/STALE-BOT.md)
-closeComment: |
-  I closed this issue due to inactivity. → [More info](https://github.com/NixOS/nix/blob/master/.github/STALE-BOT.md)
+markComment: false
+closeComment: false
.github/workflows/ci.yml (vendored, 6 changed lines)

@@ -4,6 +4,8 @@ on:
   pull_request:
   push:
 
+permissions: read-all
+
 jobs:
 
   tests:
@@ -28,6 +30,8 @@ jobs:
     - run: nix --experimental-features 'nix-command flakes' flake check -L
 
   check_cachix:
+    permissions:
+      contents: none
     name: Cachix secret present for installer tests
     runs-on: ubuntu-latest
     outputs:
@@ -88,7 +92,7 @@ jobs:
         fetch-depth: 0
    - uses: cachix/install-nix-action@v17
    - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - run: echo NIX_VERSION="$(nix-instantiate --eval -E '(import ./default.nix).defaultPackage.${builtins.currentSystem}.version' | tr -d \")" >> $GITHUB_ENV
+    - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
    - uses: cachix/cachix-action@v10
      if: needs.check_cachix.outputs.secret == 'true'
      with:
.github/workflows/hydra_status.yml (vendored, 4 changed lines)

@@ -1,8 +1,12 @@
 name: Hydra status
 
+permissions: read-all
+
 on:
   schedule:
     - cron: "12,42 * * * *"
   workflow_dispatch:
 
 jobs:
   check_hydra_status:
     name: Check Hydra status
@@ -1,4 +1,3 @@
-HOST_OS = @host_os@
 AR = @AR@
 BDW_GC_LIBS = @BDW_GC_LIBS@
 BOOST_LDFLAGS = @BOOST_LDFLAGS@
@@ -13,13 +12,14 @@ ENABLE_S3 = @ENABLE_S3@
 GTEST_LIBS = @GTEST_LIBS@
 HAVE_LIBCPUID = @HAVE_LIBCPUID@
 HAVE_SECCOMP = @HAVE_SECCOMP@
+HOST_OS = @host_os@
 LDFLAGS = @LDFLAGS@
 LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
 LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
 LIBCURL_LIBS = @LIBCURL_LIBS@
+LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
 LOWDOWN_LIBS = @LOWDOWN_LIBS@
 OPENSSL_LIBS = @OPENSSL_LIBS@
-LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
 PACKAGE_NAME = @PACKAGE_NAME@
 PACKAGE_VERSION = @PACKAGE_VERSION@
 SHELL = @bash@
@@ -31,6 +31,7 @@ datadir = @datadir@
 datarootdir = @datarootdir@
 doc_generate = @doc_generate@
 docdir = @docdir@
+embedded_sandbox_shell = @embedded_sandbox_shell@
 exec_prefix = @exec_prefix@
 includedir = @includedir@
 libdir = @libdir@
@@ -320,6 +320,14 @@ if test ${cross_compiling:-no} = no && ! test -z ${sandbox_shell+x}; then
     fi
 fi
 
+AC_ARG_ENABLE(embedded-sandbox-shell, AS_HELP_STRING([--enable-embedded-sandbox-shell],[include the sandbox shell in the Nix binary [default=no]]),
+  embedded_sandbox_shell=$enableval, embedded_sandbox_shell=no)
+AC_SUBST(embedded_sandbox_shell)
+
+if test "$embedded_sandbox_shell" = yes; then
+  AC_DEFINE(HAVE_EMBEDDED_SANDBOX_SHELL, 1, [Include the sandbox shell in the Nix binary.])
+fi
+
 # Expand all variables in config.status.
 test "$prefix" = NONE && prefix=$ac_default_prefix
 test "$exec_prefix" = NONE && exec_prefix='${prefix}'
@@ -72,6 +72,7 @@
   - [CLI guideline](contributing/cli-guideline.md)
 - [Release Notes](release-notes/release-notes.md)
   - [Release X.Y (202?-??-??)](release-notes/rl-next.md)
+  - [Release 2.10 (2022-07-11)](release-notes/rl-2.10.md)
   - [Release 2.9 (2022-05-30)](release-notes/rl-2.9.md)
   - [Release 2.8 (2022-04-19)](release-notes/rl-2.8.md)
   - [Release 2.7 (2022-03-07)](release-notes/rl-2.7.md)
@@ -12,6 +12,12 @@
 [`--dry-run`]
 [{`--out-link` | `-o`} *outlink*]
 
+# Disambiguation
+
+This man page describes the command `nix-build`, which is distinct from `nix
+build`. For documentation on the latter, run `nix build --help` or see `man
+nix3-build`.
+
 # Description
 
 The `nix-build` command builds the derivations described by the Nix
@@ -31,7 +31,7 @@ subcommand to be performed. These are documented below.
 Several commands, such as `nix-env -q` and `nix-env -i`, take a list of
 arguments that specify the packages on which to operate. These are
 extended regular expressions that must match the entire name of the
-package. (For details on regular expressions, see regex7.) The match is
+package. (For details on regular expressions, see **regex**(7).) The match is
 case-sensitive. The regular expression can optionally be followed by a
 dash and a version number; if omitted, any version of the package will
 match. Here are some examples:
@@ -412,7 +412,7 @@ The upgrade operation determines whether a derivation `y` is an upgrade
 of a derivation `x` by looking at their respective `name` attributes.
 The names (e.g., `gcc-3.3.1` are split into two parts: the package name
 (`gcc`), and the version (`3.3.1`). The version part starts after the
-first dash not followed by a letter. `x` is considered an upgrade of `y`
+first dash not followed by a letter. `y` is considered an upgrade of `x`
 if their package names match, and the version of `y` is higher than that
 of `x`.
 
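The name/version split described in this hunk is the same rule implemented by the `parseDrvName` builtin, so it can be checked directly in `nix repl`; a small illustration of the split and the version comparison that the upgrade operation relies on:

```nix
builtins.parseDrvName "gcc-3.3.1"
# => { name = "gcc"; version = "3.3.1"; }

builtins.parseDrvName "firefox-91.0.2-bin"
# => { name = "firefox"; version = "91.0.2-bin"; }

# y is an upgrade of x when the names match and y's version compares higher:
builtins.compareVersions "3.4" "3.3.1"
# => 1
```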
@@ -15,6 +15,12 @@
 [`--keep` *name*]
 {{`--packages` | `-p`} {*packages* | *expressions*} … | [*path*]}
 
+# Disambiguation
+
+This man page describes the command `nix-shell`, which is distinct from `nix
+shell`. For documentation on the latter, run `nix shell --help` or see `man
+nix3-shell`.
+
 # Description
 
 The command `nix-shell` will build the dependencies of the specified
doc/manual/src/release-notes/rl-2.10.md (new file, 31 lines)

@@ -0,0 +1,31 @@
+# Release 2.10 (2022-07-11)
+
+* `nix repl` now takes installables on the command line, unifying the usage
+  with other commands that use `--file` and `--expr`. Primary breaking change
+  is for the common usage of `nix repl '<nixpkgs>'` which can be recovered with
+  `nix repl --file '<nixpkgs>'` or `nix repl --expr 'import <nixpkgs>{}'`.
+
+  This is currently guarded by the `repl-flake` experimental feature.
+
+* A new function `builtins.traceVerbose` is available. It is similar
+  to `builtins.trace` if the `trace-verbose` setting is set to true,
+  and it is a no-op otherwise.
+
+* `nix search` has a new flag `--exclude` to filter out packages.
+
+* On Linux, if `/nix` doesn't exist and cannot be created and you're
+  not running as root, Nix will automatically use
+  `~/.local/share/nix/root` as a chroot store. This enables non-root
+  users to download the statically linked Nix binary and have it work
+  out of the box, e.g.
+
+  ```
+  # ~/nix run nixpkgs#hello
+  warning: '/nix' does not exists, so Nix will use '/home/ubuntu/.local/share/nix/root' as a chroot store
+  Hello, world!
+  ```
+
+* `flake-registry.json` is now fetched from `channels.nixos.org`.
+
+* Nix can now be built with LTO by passing `--enable-lto` to `configure`.
+  LTO is currently only supported when building with GCC.
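A sketch of the `builtins.traceVerbose` behaviour described in these notes (the expression itself is hypothetical; `traceVerbose` takes the same message and value arguments as `builtins.trace`):

```nix
let
  config = { enableFoo = true; };
in
# Prints the message only when the trace-verbose setting is enabled;
# otherwise this evaluates to `config` silently, like a no-op trace.
builtins.traceVerbose "evaluating config" config
```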
@@ -1,7 +1,4 @@
 # Release X.Y (202?-??-??)
 
-* Nix can now be built with LTO by passing `--enable-lto` to `configure`.
-  LTO is currently only supported when building with GCC.
-
 * Add experimental *indexed store derivations* installable syntax, part of the
   the `computed-derivations` experimental feature.
@@ -4,6 +4,8 @@
 , tag ? "latest"
 , channelName ? "nixpkgs"
 , channelURL ? "https://nixos.org/channels/nixpkgs-unstable"
+, extraPkgs ? []
+, maxLayers ? 100
 }:
 let
   defaultPkgs = with pkgs; [
@@ -23,7 +25,7 @@ let
     iana-etc
     git
     openssh
-  ];
+  ] ++ extraPkgs;
 
   users = {
 
@@ -229,7 +231,7 @@ let
 in
 pkgs.dockerTools.buildLayeredImageWithNixDb {
 
-  inherit name tag;
+  inherit name tag maxLayers;
 
   contents = [ baseSystem ];
 
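The two new arguments are threaded straight through this expression: `extraPkgs` is appended to `defaultPkgs` and `maxLayers` is forwarded to `buildLayeredImageWithNixDb`. A hypothetical invocation (the file path and the way `pkgs` is supplied are assumptions, not shown in this hunk):

```nix
import ./docker.nix {
  inherit pkgs;              # a nixpkgs package set
  extraPkgs = [ pkgs.vim ];  # appended via `++ extraPkgs`
  maxLayers = 70;            # forwarded via `inherit name tag maxLayers`
}
```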
@@ -18,16 +18,16 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1645296114,
-        "narHash": "sha256-y53N7TyIkXsjMpOG7RhvqJFGDacLs9HlyHeSTBioqYU=",
+        "lastModified": 1653988320,
+        "narHash": "sha256-ZaqFFsSDipZ6KVqriwM34T739+KLYJvNmCWzErjAg7c=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "530a53dcbc9437363471167a5e4762c5fcfa34a1",
+        "rev": "2fa57ed190fd6c7c746319444f34b5917666e5c1",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-21.05-small",
+        "ref": "nixos-22.05-small",
         "repo": "nixpkgs",
         "type": "github"
       }
flake.nix (90 changed lines)

@@ -1,7 +1,7 @@
 {
   description = "The purely functional package manager";
 
-  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-21.05-small";
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.05-small";
   inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
   inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
 
@@ -36,7 +36,7 @@
         )
       );
 
-      forAllStdenvs = stdenvs: f: nixpkgs.lib.genAttrs stdenvs (stdenv: f stdenv);
+      forAllStdenvs = f: nixpkgs.lib.genAttrs stdenvs (stdenv: f stdenv);
 
       # Memoize nixpkgs for different platforms for efficiency.
       nixpkgsFor =
@@ -54,7 +54,7 @@
       # we want most of the time and for backwards compatibility
       forAllSystems (system: stdenvsPackages.${system} // stdenvsPackages.${system}.stdenvPackages);
 
-      commonDeps = pkgs: with pkgs; rec {
+      commonDeps = { pkgs, isStatic ? false }: with pkgs; rec {
         # Use "busybox-sandbox-shell" if present,
         # if not (legacy) fallback and hope it's sufficient.
         sh = pkgs.busybox-sandbox-shell or (busybox.override {
@@ -85,10 +85,11 @@
           lib.optionals stdenv.isLinux [
             "--with-boost=${boost}/lib"
             "--with-sandbox-shell=${sh}/bin/busybox"
+          ]
+          ++ lib.optionals (stdenv.isLinux && !(isStatic && stdenv.system == "aarch64-linux")) [
             "LDFLAGS=-fuse-ld=gold"
           ];
 
 
         nativeBuildDeps =
           [
             buildPackages.bison
@@ -171,7 +172,7 @@
           echo "file installer $out/install" >> $out/nix-support/hydra-build-products
         '';
 
-      testNixVersions = pkgs: client: daemon: with commonDeps pkgs; with pkgs.lib; pkgs.stdenv.mkDerivation {
+      testNixVersions = pkgs: client: daemon: with commonDeps { inherit pkgs; }; with pkgs.lib; pkgs.stdenv.mkDerivation {
         NIX_DAEMON_PACKAGE = daemon;
         NIX_CLIENT_PACKAGE = client;
         name =
@@ -282,7 +283,7 @@
           # Forward from the previous stage as we don’t want it to pick the lowdown override
           nixUnstable = prev.nixUnstable;
 
-          nix = with final; with commonDeps pkgs; currentStdenv.mkDerivation {
+          nix = with final; with commonDeps { inherit pkgs; }; currentStdenv.mkDerivation {
             name = "nix-${version}";
             inherit version;
 
@@ -314,6 +315,7 @@
               for LIB in $out/lib/*.dylib; do
                 chmod u+w $LIB
                 install_name_tool -id $LIB $LIB
+                install_name_tool -delete_rpath ${boost}/lib/ $LIB || true
               done
               install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib
             ''}
@@ -370,10 +372,10 @@
               ++ lib.optional (currentStdenv.isLinux || currentStdenv.isDarwin) libsodium
               ++ lib.optional currentStdenv.isDarwin darwin.apple_sdk.frameworks.Security;
 
-            configureFlags = ''
-              --with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}
-              --with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}
-            '';
+            configureFlags = [
+              "--with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}"
+              "--with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}"
+            ];
 
             enableParallelBuilding = true;
 
@@ -405,7 +407,7 @@
 
     # A Nixpkgs overlay that overrides the 'nix' and
     # 'nix.perl-bindings' packages.
-    overlay = overlayFor (p: p.stdenv);
+    overlays.default = overlayFor (p: p.stdenv);
 
     hydraJobs = {
 
@@ -430,7 +432,7 @@
         value = let
           nixpkgsCross = import nixpkgs {
             inherit system crossSystem;
-            overlays = [ self.overlay ];
+            overlays = [ self.overlays.default ];
           };
         in binaryTarball nixpkgsFor.${system} self.packages.${system}."nix-${crossSystem}" nixpkgsCross;
       }) crossSystems));
@@ -448,7 +450,7 @@
     # Line coverage analysis.
     coverage =
       with nixpkgsFor.x86_64-linux;
-      with commonDeps pkgs;
+      with commonDeps { inherit pkgs; };
 
       releaseTools.coverageAnalysis {
         name = "nix-coverage-${version}";
@@ -476,31 +478,31 @@
     tests.remoteBuilds = import ./tests/remote-builds.nix {
       system = "x86_64-linux";
       inherit nixpkgs;
-      inherit (self) overlay;
+      overlay = self.overlays.default;
     };
 
     tests.nix-copy-closure = import ./tests/nix-copy-closure.nix {
       system = "x86_64-linux";
       inherit nixpkgs;
-      inherit (self) overlay;
+      overlay = self.overlays.default;
     };
 
     tests.nssPreload = (import ./tests/nss-preload.nix rec {
       system = "x86_64-linux";
       inherit nixpkgs;
-      inherit (self) overlay;
+      overlay = self.overlays.default;
     });
 
     tests.githubFlakes = (import ./tests/github-flakes.nix rec {
       system = "x86_64-linux";
       inherit nixpkgs;
-      inherit (self) overlay;
+      overlay = self.overlays.default;
     });
 
     tests.sourcehutFlakes = (import ./tests/sourcehut-flakes.nix rec {
       system = "x86_64-linux";
       inherit nixpkgs;
-      inherit (self) overlay;
+      overlay = self.overlays.default;
     });
 
     tests.setuid = nixpkgs.lib.genAttrs
@@ -508,7 +510,7 @@
       (system:
         import ./tests/setuid.nix rec {
           inherit nixpkgs system;
-          inherit (self) overlay;
+          overlay = self.overlays.default;
         });
 
     # Make sure that nix-env still produces the exact same result
@@ -553,12 +555,13 @@
       dockerImage = self.hydraJobs.dockerImage.${system};
     });
 
-    packages = forAllSystems (system: {
+    packages = forAllSystems (system: rec {
       inherit (nixpkgsFor.${system}) nix;
+      default = nix;
     } // (nixpkgs.lib.optionalAttrs (builtins.elem system linux64BitSystems) {
       nix-static = let
         nixpkgs = nixpkgsFor.${system}.pkgsStatic;
-      in with commonDeps nixpkgs; nixpkgs.stdenv.mkDerivation {
+      in with commonDeps { pkgs = nixpkgs; isStatic = true; }; nixpkgs.stdenv.mkDerivation {
         name = "nix-${version}";
 
         src = self;
@@ -570,14 +573,24 @@
         nativeBuildInputs = nativeBuildDeps;
         buildInputs = buildDeps ++ propagatedDeps;
 
-        configureFlags = [ "--sysconfdir=/etc" ];
+        # Work around pkgsStatic disabling all tests.
+        # Remove in NixOS 22.11, see https://github.com/NixOS/nixpkgs/pull/140271.
+        preHook =
+          ''
+            doCheck=1
+            doInstallCheck=1
+          '';
+
+        configureFlags =
+          configureFlags ++
+          [ "--sysconfdir=/etc"
+            "--enable-embedded-sandbox-shell"
+          ];
 
         enableParallelBuilding = true;
 
         makeFlags = "profiledir=$(out)/etc/profile.d";
 
-        doCheck = true;
-
         installFlags = "sysconfdir=$(out)/etc";
 
         postInstall = ''
@@ -587,7 +600,6 @@
           echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
         '';
 
-        doInstallCheck = true;
         installCheckFlags = "sysconfdir=$(out)/etc";
 
         stripAllList = ["bin"];
@@ -596,6 +608,7 @@
 
         hardeningDisable = [ "pie" ];
       };
 
       dockerImage =
         let
           pkgs = nixpkgsFor.${system};
@@ -610,14 +623,16 @@
           ln -s ${image} $image
           echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
         '';
-    } // builtins.listToAttrs (map (crossSystem: {
+    }
+
+    // builtins.listToAttrs (map (crossSystem: {
       name = "nix-${crossSystem}";
       value = let
         nixpkgsCross = import nixpkgs {
           inherit system crossSystem;
-          overlays = [ self.overlay ];
+          overlays = [ self.overlays.default ];
         };
-      in with commonDeps nixpkgsCross; nixpkgsCross.stdenv.mkDerivation {
+      in with commonDeps { pkgs = nixpkgsCross; }; nixpkgsCross.stdenv.mkDerivation {
         name = "nix-${version}";
 
         src = self;
@@ -649,20 +664,18 @@
         doInstallCheck = true;
         installCheckFlags = "sysconfdir=$(out)/etc";
       };
-    }) crossSystems)) // (builtins.listToAttrs (map (stdenvName:
+    }) (if system == "x86_64-linux" then crossSystems else [])))
+
+    // (builtins.listToAttrs (map (stdenvName:
       nixpkgsFor.${system}.lib.nameValuePair
       "nix-${stdenvName}"
      nixpkgsFor.${system}."${stdenvName}Packages".nix
     ) stdenvs)));
 
-    defaultPackage = forAllSystems (system: self.packages.${system}.nix);
-
-    devShell = forAllSystems (system: self.devShells.${system}.stdenvPackages);
-
-    devShells = forAllSystemsAndStdenvs (system: stdenv:
+    devShells = forAllSystems (system:
+      forAllStdenvs (stdenv:
        with nixpkgsFor.${system};
-        with commonDeps pkgs;
+        with commonDeps { inherit pkgs; };
 
        nixpkgsFor.${system}.${stdenv}.mkDerivation {
          name = "nix";
@@ -686,7 +699,10 @@
          # Make bash completion work.
          XDG_DATA_DIRS+=:$out/share
        '';
-      });
+      }
+      )
+      // { default = self.devShells.${system}.stdenv; }
+    );
 
   };
 }
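The `overlay` flake output is replaced by `overlays.default` throughout, and every internal reference (`self.overlay`, `inherit (self) overlay`) is updated to match. A hypothetical downstream flake consuming the renamed output would look roughly like this (input names and the consuming attribute are assumptions for illustration):

```nix
{
  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.05-small";
  inputs.nix.url = "github:NixOS/nix";

  outputs = { self, nixpkgs, nix }: {
    # formerly `nix.overlay`
    legacyPackages.x86_64-linux = import nixpkgs {
      system = "x86_64-linux";
      overlays = [ nix.overlays.default ];
    };
  };
}
```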
@@ -125,7 +125,7 @@ define build-library
   $(1)_PATH := $$(_d)/$$($(1)_NAME).a
 
   $$($(1)_PATH): $$($(1)_OBJS) | $$(_d)/
-	+$$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$?
+	+$$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$^
 	$$(trace-ar) $(AR) crs $$@ $$(_d)/$$($(1)_NAME).o
 
   $(1)_LDFLAGS_USE += $$($(1)_PATH) $$($(1)_LDFLAGS)
@@ -442,8 +442,9 @@ add_nix_vol_fstab_line() {
     local escaped_mountpoint="${NIX_ROOT/ /'\\\'040}"
     shift
 
-    # wrap `ex` to work around a problem with vim plugins breaking exit codes
-    # (see github.com/NixOS/nix/issues/5468)
+    # wrap `ex` to work around problems w/ vim features breaking exit codes
+    # - plugins (see github.com/NixOS/nix/issues/5468): -u NONE
+    # - swap file: -n
     #
     # the first draft used `--noplugin`, but github.com/NixOS/nix/issues/6462
     # suggests we need the less-semantic `-u NONE`
@@ -456,7 +457,7 @@ add_nix_vol_fstab_line() {
     # minver 10.12.6 seems to have released with vim 7.4
     cat > "$SCRATCH/ex_cleanroom_wrapper" <<EOF
 #!/bin/sh
-/usr/bin/ex -u NONE "\$@"
+/usr/bin/ex -u NONE -n "\$@"
 EOF
     chmod 755 "$SCRATCH/ex_cleanroom_wrapper"
 
@@ -650,9 +651,9 @@ EOF
     task "Configuring /etc/synthetic.conf to make a mount-point at $NIX_ROOT" >&2
     # technically /etc/synthetic.d/nix is supported in Big Sur+
     # but handling both takes even more code...
-    # Note: `-u NONE` disables vim plugins/rc; see note on --clean earlier
+    # See earlier note; `-u NONE` disables vim plugins/rc, `-n` skips swapfile
     _sudo "to add Nix to /etc/synthetic.conf" \
-        /usr/bin/ex -u NONE /etc/synthetic.conf <<EOF
+        /usr/bin/ex -u NONE -n /etc/synthetic.conf <<EOF
 :a
 ${NIX_ROOT:1}
 .
@@ -820,8 +821,8 @@ setup_volume_daemon() {
     local volume_uuid="$2"
     if ! test_voldaemon; then
         task "Configuring LaunchDaemon to mount '$NIX_VOLUME_LABEL'" >&2
-        # Note: `-u NONE` disables vim plugins/rc; see note on --clean earlier
-        _sudo "to install the Nix volume mounter" /usr/bin/ex -u NONE "$NIX_VOLUME_MOUNTD_DEST" <<EOF
+        # See earlier note; `-u NONE` disables vim plugins/rc, `-n` skips swapfile
+        _sudo "to install the Nix volume mounter" /usr/bin/ex -u NONE -n "$NIX_VOLUME_MOUNTD_DEST" <<EOF
 :a
 $(generate_mount_daemon "$cmd_type" "$volume_uuid")
 .
@@ -638,6 +638,17 @@ place_channel_configuration() {
     fi
 }
 
+check_selinux() {
+    if command -v getenforce > /dev/null 2>&1; then
+        if ! [ "$(getenforce)" = "Disabled" ]; then
+            failure <<EOF
+Nix does not work with selinux enabled yet!
+see https://github.com/NixOS/nix/issues/2374
+EOF
+        fi
+    fi
+}
+
 welcome_to_nix() {
     ok "Welcome to the Multi-User Nix Installation"
 
@@ -866,6 +877,8 @@ when I need to.
 EOF
     fi
 
+    check_selinux
+
     if [ "$(uname -s)" = "Darwin" ]; then
         # shellcheck source=./install-darwin-multi-user.sh
         . "$EXTRACTED_NIX_PATH/install-darwin-multi-user.sh"
@@ -148,7 +148,9 @@ if ! [ -w "$dest" ]; then
     exit 1
 fi
 
-mkdir -p "$dest/store"
+# The auto-chroot code in openFromNonUri() checks for the
+# non-existence of /nix/var/nix, so we need to create it here.
+mkdir -p "$dest/store" "$dest/var/nix"
 
 printf "copying Nix to %s..." "${dest}/store" >&2
 # Insert a newline if no progress is shown.
@@ -58,6 +58,7 @@ struct CopyCommand : virtual StoreCommand
 struct EvalCommand : virtual StoreCommand, MixEvalArgs
 {
     bool startReplOnEvalErrors = false;
+    bool ignoreExceptionsDuringTry = false;
 
     EvalCommand();
 
@@ -77,10 +78,16 @@ struct MixFlakeOptions : virtual Args, EvalCommand
 {
     flake::LockFlags lockFlags;
 
+    std::optional<std::string> needsFlakeInputCompletion = {};
+
     MixFlakeOptions();
 
-    virtual std::optional<FlakeRef> getFlakeRefForCompletion()
+    virtual std::vector<std::string> getFlakesForCompletion()
     { return {}; }
 
+    void completeFlakeInput(std::string_view prefix);
+
+    void completionHook() override;
 };
 
 struct SourceExprCommand : virtual Args, MixFlakeOptions
@@ -116,12 +123,13 @@ struct InstallablesCommand : virtual Args, SourceExprCommand
     InstallablesCommand();
 
     void prepare() override;
+    Installables load();
 
     virtual bool useDefaultInstallables() { return true; }
 
-    std::optional<FlakeRef> getFlakeRefForCompletion() override;
+    std::vector<std::string> getFlakesForCompletion() override;
 
-private:
+protected:
 
     std::vector<std::string> _installables;
 };
@@ -135,9 +143,9 @@ struct InstallableCommand : virtual Args, SourceExprCommand
 
     void prepare() override;
 
-    std::optional<FlakeRef> getFlakeRefForCompletion() override
+    std::vector<std::string> getFlakesForCompletion() override
     {
-        return parseFlakeRefWithFragment(_installable, absPath(".")).first;
+        return {_installable};
     }
 
 private:
@@ -23,17 +23,6 @@
 
 namespace nix {
 
-void completeFlakeInputPath(
-    ref<EvalState> evalState,
-    const FlakeRef & flakeRef,
-    std::string_view prefix)
-{
-    auto flake = flake::getFlake(*evalState, flakeRef, true);
-    for (auto & input : flake.inputs)
-        if (hasPrefix(input.first, prefix))
-            completions->add(input.first);
-}
-
 MixFlakeOptions::MixFlakeOptions()
 {
     auto category = "Common flake-related options";
@@ -86,8 +75,7 @@ MixFlakeOptions::MixFlakeOptions()
             lockFlags.inputUpdates.insert(flake::parseInputPath(s));
         }},
         .completer = {[&](size_t, std::string_view prefix) {
-            if (auto flakeRef = getFlakeRefForCompletion())
-                completeFlakeInputPath(getEvalState(), *flakeRef, prefix);
+            needsFlakeInputCompletion = {std::string(prefix)};
         }}
     });
 
@@ -103,12 +91,10 @@ MixFlakeOptions::MixFlakeOptions()
                 parseFlakeRef(flakeRef, absPath("."), true));
         }},
         .completer = {[&](size_t n, std::string_view prefix) {
-            if (n == 0) {
-                if (auto flakeRef = getFlakeRefForCompletion())
-                    completeFlakeInputPath(getEvalState(), *flakeRef, prefix);
-            } else if (n == 1) {
+            if (n == 0)
+                needsFlakeInputCompletion = {std::string(prefix)};
+            else if (n == 1)
                 completeFlakeRef(getEvalState()->store, prefix);
-            }
         }}
     });
 
@@ -139,6 +125,24 @@ MixFlakeOptions::MixFlakeOptions()
     });
 }
 
+void MixFlakeOptions::completeFlakeInput(std::string_view prefix)
+{
+    auto evalState = getEvalState();
+    for (auto & flakeRefS : getFlakesForCompletion()) {
+        auto flakeRef = parseFlakeRefWithFragment(expandTilde(flakeRefS), absPath(".")).first;
+        auto flake = flake::getFlake(*evalState, flakeRef, true);
+        for (auto & input : flake.inputs)
+            if (hasPrefix(input.first, prefix))
+                completions->add(input.first);
+    }
+}
+
+void MixFlakeOptions::completionHook()
+{
+    if (auto & prefix = needsFlakeInputCompletion)
+        completeFlakeInput(*prefix);
+}
+
 SourceExprCommand::SourceExprCommand(bool supportReadOnlyMode)
 {
     addFlag({
@@ -146,7 +150,8 @@ SourceExprCommand::SourceExprCommand(bool supportReadOnlyMode)
         .shortName = 'f',
         .description =
             "Interpret installables as attribute paths relative to the Nix expression stored in *file*. "
-            "If *file* is the character -, then a Nix expression will be read from standard input.",
+            "If *file* is the character -, then a Nix expression will be read from standard input. "
+            "Implies `--impure`.",
         .category = installablesCategory,
         .labels = {"file"},
         .handler = {&file},
@@ -955,6 +960,9 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
             break;
 
         case Realise::Outputs: {
+            if (settings.printMissing)
+                printMissing(store, pathsToBuild, lvlInfo);
+
             for (auto & buildResult : store->buildPathsWithResults(pathsToBuild, bMode, evalStore)) {
                 if (!buildResult.success())
                     buildResult.rethrow();
@@ -1068,21 +1076,26 @@ InstallablesCommand::InstallablesCommand()
 
 void InstallablesCommand::prepare()
 {
+    installables = load();
+}
+
+Installables InstallablesCommand::load() {
+    Installables installables;
     if (_installables.empty() && useDefaultInstallables())
         // FIXME: commands like "nix profile install" should not have a
         // default, probably.
         _installables.push_back(".");
-    installables = parseInstallables(getStore(), _installables);
+    return parseInstallables(getStore(), _installables);
 }
 
-std::optional<FlakeRef> InstallablesCommand::getFlakeRefForCompletion()
+std::vector<std::string> InstallablesCommand::getFlakesForCompletion()
 {
     if (_installables.empty()) {
         if (useDefaultInstallables())
-            return parseFlakeRefWithFragment(".", absPath(".")).first;
+            return {"."};
         return {};
     }
-    return parseFlakeRefWithFragment(_installables.front(), absPath(".")).first;
+    return _installables;
 }
 
 InstallableCommand::InstallableCommand(bool supportReadOnlyMode)
@@ -132,6 +132,8 @@ struct Installable
         const std::vector<std::shared_ptr<Installable>> & installables);
 };
 
+typedef std::vector<std::shared_ptr<Installable>> Installables;
+
 struct InstallableValue : Installable
 {
     ref<EvalState> state;
@@ -22,6 +22,7 @@ extern "C" {
 #include "ansicolor.hh"
 #include "shared.hh"
 #include "eval.hh"
+#include "eval-cache.hh"
 #include "eval-inline.hh"
 #include "attr-path.hh"
 #include "store-api.hh"
@@ -54,6 +55,8 @@ struct NixRepl
     size_t debugTraceIndex;
 
     Strings loadedFiles;
+    typedef std::vector<std::pair<Value*,std::string>> AnnotatedValues;
+    std::function<AnnotatedValues()> getValues;
 
     const static int envSize = 32768;
     std::shared_ptr<StaticEnv> staticEnv;
@@ -63,13 +66,15 @@ struct NixRepl
 
     const Path historyFile;
 
-    NixRepl(ref<EvalState> state);
+    NixRepl(const Strings & searchPath, nix::ref<Store> store,ref<EvalState> state,
+            std::function<AnnotatedValues()> getValues);
     ~NixRepl();
-    void mainLoop(const std::vector<std::string> & files);
+    void mainLoop();
     StringSet completePrefix(const std::string & prefix);
     bool getLine(std::string & input, const std::string & prompt);
     StorePath getDerivationPath(Value & v);
     bool processLine(std::string line);
 
     void loadFile(const Path & path);
     void loadFlake(const std::string & flakeRef);
     void initEnv();
@@ -96,9 +101,11 @@ std::string removeWhitespace(std::string s)
 }
 
 
-NixRepl::NixRepl(ref<EvalState> state)
+NixRepl::NixRepl(const Strings & searchPath, nix::ref<Store> store, ref<EvalState> state,
+            std::function<NixRepl::AnnotatedValues()> getValues)
     : state(state)
     , debugTraceIndex(0)
+    , getValues(getValues)
     , staticEnv(new StaticEnv(false, state->staticBaseEnv.get()))
     , historyFile(getDataDir() + "/nix/repl-history")
 {
@@ -111,23 +118,20 @@ NixRepl::~NixRepl()
     write_history(historyFile.c_str());
 }
 
-std::string runNix(Path program, const Strings & args,
+void runNix(Path program, const Strings & args,
     const std::optional<std::string> & input = {})
 {
     auto subprocessEnv = getEnv();
     subprocessEnv["NIX_CONFIG"] = globalConfig.toKeyValue();
 
-    auto res = runProgram(RunOptions {
+    runProgram2(RunOptions {
         .program = settings.nixBinDir+ "/" + program,
         .args = args,
         .environment = subprocessEnv,
         .input = input,
     });
 
-    if (!statusOk(res.first))
-        throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first));
-
-    return res.second;
+    return;
 }
 
 static NixRepl * curRepl; // ugly
@@ -228,18 +232,12 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
     return out;
 }
 
-void NixRepl::mainLoop(const std::vector<std::string> & files)
+void NixRepl::mainLoop()
 {
     std::string error = ANSI_RED "error:" ANSI_NORMAL " ";
     notice("Welcome to Nix " + nixVersion + ". Type :? for help.\n");
 
-    if (!files.empty()) {
-        for (auto & i : files)
-            loadedFiles.push_back(i);
-    }
-
     loadFiles();
-    if (!loadedFiles.empty()) notice("");
 
     // Allow nix-repl specific settings in .inputrc
     rl_readline_name = "nix-repl";
@@ -749,7 +747,6 @@ bool NixRepl::processLine(std::string line)
     return true;
 }
 
-
 void NixRepl::loadFile(const Path & path)
 {
     loadedFiles.remove(path);
@@ -809,13 +806,15 @@ void NixRepl::loadFiles()
     Strings old = loadedFiles;
     loadedFiles.clear();
 
-    bool first = true;
     for (auto & i : old) {
-        if (!first) notice("");
-        first = false;
         notice("Loading '%1%'...", i);
         loadFile(i);
     }
+
+    for (auto & [i, what] : getValues()) {
+        notice("Loading installable '%1%'...", what);
+        addAttrsToScope(*i);
+    }
 }
 
 
@@ -1015,7 +1014,17 @@ void runRepl(
     ref<EvalState>evalState,
     const ValMap & extraEnv)
 {
-    auto repl = std::make_unique<NixRepl>(evalState);
+    auto getValues = [&]()->NixRepl::AnnotatedValues{
+        NixRepl::AnnotatedValues values;
+        return values;
+    };
+    const Strings & searchPath = {};
+    auto repl = std::make_unique<NixRepl>(
+        searchPath,
+        openStore(),
+        evalState,
+        getValues
+    );
 
     repl->initEnv();
 
@@ -1023,20 +1032,40 @@ void runRepl(
     for (auto & [name, value] : extraEnv)
         repl->addVarToScope(repl->state->symbols.create(name), *value);
 
-    repl->mainLoop({});
+    repl->mainLoop();
 }
 
-struct CmdRepl : StoreCommand, MixEvalArgs
+struct CmdRepl : InstallablesCommand
 {
-    std::vector<std::string> files;
-
-    CmdRepl()
+    CmdRepl(){
+        evalSettings.pureEval = false;
+    }
+    void prepare()
     {
-        expectArgs({
-            .label = "files",
-            .handler = {&files},
-            .completer = completePath
-        });
+        if (!settings.isExperimentalFeatureEnabled(Xp::ReplFlake) && !(file) && this->_installables.size() >= 1) {
+            warn("future versions of Nix will require using `--file` to load a file");
+            if (this->_installables.size() > 1)
+                warn("more than one input file is not currently supported");
+            auto filePath = this->_installables[0].data();
+            file = std::optional(filePath);
+            _installables.front() = _installables.back();
+            _installables.pop_back();
+        }
+        installables = InstallablesCommand::load();
+    }
+    std::vector<std::string> files;
+    Strings getDefaultFlakeAttrPaths() override
+    {
+        return {""};
+    }
+    virtual bool useDefaultInstallables() override
+    {
+        return file.has_value() or expr.has_value();
+    }
+
+    bool forceImpureByDefault() override
+    {
+        return true;
     }
 
     std::string description() override
@@ -1053,14 +1082,37 @@ struct CmdRepl : StoreCommand, MixEvalArgs
 
     void run(ref<Store> store) override
     {
-        evalSettings.pureEval = false;
-
-        auto evalState = make_ref<EvalState>(searchPath, store);
-
-        auto repl = std::make_unique<NixRepl>(evalState);
+        auto state = getEvalState();
+        auto getValues = [&]()->NixRepl::AnnotatedValues{
+            auto installables = load();
+            NixRepl::AnnotatedValues values;
+            for (auto & installable: installables){
+                auto what = installable->what();
+                if (file){
+                    auto [val, pos] = installable->toValue(*state);
+                    auto what = installable->what();
+                    state->forceValue(*val, pos);
+                    auto autoArgs = getAutoArgs(*state);
+                    auto valPost = state->allocValue();
+                    state->autoCallFunction(*autoArgs, *val, *valPost);
+                    state->forceValue(*valPost, pos);
+                    values.push_back( {valPost, what });
+                } else {
+                    auto [val, pos] = installable->toValue(*state);
+                    values.push_back( {val, what} );
+                }
+            }
+            return values;
+        };
+        auto repl = std::make_unique<NixRepl>(
+            searchPath,
+            openStore(),
+            state,
+            getValues
+        );
         repl->autoArgs = getAutoArgs(*repl->state);
         repl->initEnv();
-        repl->mainLoop(files);
+        repl->mainLoop();
     }
 };
@@ -282,7 +282,7 @@ struct AttrDb
         auto queryAttribute(state->queryAttribute.use()(key.first)(symbols[key.second]));
         if (!queryAttribute.next()) return {};
 
-        auto rowId = (AttrType) queryAttribute.getInt(0);
+        auto rowId = (AttrId) queryAttribute.getInt(0);
         auto type = (AttrType) queryAttribute.getInt(1);
 
         switch (type) {
@@ -486,7 +486,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
             return nullptr;
         else if (std::get_if<failed_t>(&attr->second)) {
             if (forceErrors)
-                debug("reevaluating failed cached attribute '%s'");
+                debug("reevaluating failed cached attribute '%s'", getAttrPathStr(name));
             else
                 throw CachedEvalError("cached failure of attribute '%s'", getAttrPathStr(name));
         } else
@@ -464,9 +464,10 @@ EvalState::EvalState(
     , emptyBindings(0)
     , store(store)
     , buildStore(buildStore ? buildStore : store)
-    , debugRepl(0)
+    , debugRepl(nullptr)
     , debugStop(false)
     , debugQuit(false)
+    , trylevel(0)
     , regexCache(makeRegexCache())
 #if HAVE_BOEHMGC
     , valueAllocCache(std::allocate_shared<void *>(traceable_allocator<void *>(), nullptr))
@@ -832,7 +833,14 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
         : nullptr;
 
     if (error)
-        printError("%s\n\n" ANSI_BOLD "Starting REPL to allow you to inspect the current state of the evaluator.\n" ANSI_NORMAL, error->what());
+    {
+        printError("%s\n\n", error->what());
+
+        if (trylevel > 0 && error->info().level != lvlInfo)
+            printError("This exception occurred in a 'tryEval' call. Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL " to skip these.\n");
+
+        printError(ANSI_BOLD "Starting REPL to allow you to inspect the current state of the evaluator.\n" ANSI_NORMAL);
+    }
 
     auto se = getStaticEnv(expr);
     if (se) {
@@ -130,6 +130,7 @@ public:
     void (* debugRepl)(ref<EvalState> es, const ValMap & extraEnv);
     bool debugStop;
     bool debugQuit;
+    int trylevel;
     std::list<DebugTrace> debugTraces;
     std::map<const Expr*, const std::shared_ptr<const StaticEnv>> exprEnvs;
     const std::shared_ptr<const StaticEnv> getStaticEnv(const Expr & expr) const
@@ -150,7 +151,7 @@ public:
         if (debugRepl)
             runDebugRepl(&error, env, expr);
 
-        throw error;
+        throw std::move(error);
     }
 
     template<class E>
@@ -165,7 +166,7 @@ public:
             runDebugRepl(&e, last.env, last.expr);
         }
 
-        throw e;
+        throw std::move(e);
     }
 
 
@@ -646,6 +647,15 @@ struct EvalSettings : Config
 
     Setting<bool> useEvalCache{this, true, "eval-cache",
         "Whether to use the flake evaluation cache."};
 
+    Setting<bool> ignoreExceptionsDuringTry{this, false, "ignore-try",
+        R"(
+          If set to true, ignore exceptions inside 'tryEval' calls when evaluating nix expressions in
+          debug mode (using the --debugger flag). By default the debugger will pause on all exceptions.
+        )"};
+
+    Setting<bool> traceVerbose{this, false, "trace-verbose",
+        "Whether `builtins.traceVerbose` should trace its first argument when evaluated."};
 };
 
 extern EvalSettings evalSettings;
@ -384,6 +384,18 @@ LockedFlake lockFlake(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/* Check whether this input has overrides for a
|
||||||
|
non-existent input. */
|
||||||
|
for (auto [inputPath, inputOverride] : overrides) {
|
||||||
|
auto inputPath2(inputPath);
|
||||||
|
auto follow = inputPath2.back();
|
||||||
|
inputPath2.pop_back();
|
||||||
|
if (inputPath2 == inputPathPrefix && !flakeInputs.count(follow))
|
||||||
|
warn(
|
||||||
|
"input '%s' has an override for a non-existent input '%s'",
|
||||||
|
printInputPath(inputPathPrefix), follow);
|
||||||
|
}
|
||||||
|
|
||||||
/* Go over the flake inputs, resolve/fetch them if
|
/* Go over the flake inputs, resolve/fetch them if
|
||||||
necessary (i.e. if they're new or the flakeref changed
|
necessary (i.e. if they're new or the flakeref changed
|
||||||
from what's in the lock file). */
|
from what's in the lock file). */
|
||||||
|
@ -513,6 +525,15 @@ LockedFlake lockFlake(
|
||||||
if (!lockFlags.allowMutable && !input.ref->input.isLocked())
|
if (!lockFlags.allowMutable && !input.ref->input.isLocked())
|
||||||
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
|
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
|
||||||
|
|
||||||
|
/* Note: in case of an --override-input, we use
|
||||||
|
the *original* ref (input2.ref) for the
|
||||||
|
"original" field, rather than the
|
||||||
|
override. This ensures that the override isn't
|
||||||
|
nuked the next time we update the lock
|
||||||
|
file. That is, overrides are sticky unless you
|
||||||
|
use --no-write-lock-file. */
|
||||||
|
auto ref = input2.ref ? *input2.ref : *input.ref;
|
||||||
|
|
||||||
if (input.isFlake) {
|
if (input.isFlake) {
|
||||||
Path localPath = parentPath;
|
Path localPath = parentPath;
|
||||||
FlakeRef localRef = *input.ref;
|
FlakeRef localRef = *input.ref;
|
||||||
|
@ -524,15 +545,7 @@ LockedFlake lockFlake(
|
||||||
|
|
||||||
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath);
|
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath);
|
||||||
|
|
||||||
/* Note: in case of an --override-input, we use
|
auto childNode = std::make_shared<LockedNode>(inputFlake.lockedRef, ref);
|
||||||
the *original* ref (input2.ref) for the
|
|
||||||
"original" field, rather than the
|
|
||||||
override. This ensures that the override isn't
|
|
||||||
nuked the next time we update the lock
|
|
||||||
file. That is, overrides are sticky unless you
|
|
||||||
use --no-write-lock-file. */
|
|
||||||
auto childNode = std::make_shared<LockedNode>(
|
|
||||||
inputFlake.lockedRef, input2.ref ? *input2.ref : *input.ref);
|
|
||||||
|
|
||||||
node->inputs.insert_or_assign(id, childNode);
|
node->inputs.insert_or_assign(id, childNode);
|
||||||
|
|
||||||
|
@ -560,7 +573,7 @@ LockedFlake lockFlake(
|
||||||
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||||
state, *input.ref, useRegistries, flakeCache);
|
state, *input.ref, useRegistries, flakeCache);
|
||||||
node->inputs.insert_or_assign(id,
|
node->inputs.insert_or_assign(id,
|
||||||
std::make_shared<LockedNode>(lockedRef, *input.ref, false));
|
std::make_shared<LockedNode>(lockedRef, ref, false));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -150,16 +150,16 @@ struct Expr
|
||||||
};
|
};
|
||||||
|
|
||||||
#define COMMON_METHODS \
|
#define COMMON_METHODS \
|
||||||
void show(const SymbolTable & symbols, std::ostream & str) const; \
|
void show(const SymbolTable & symbols, std::ostream & str) const override; \
|
||||||
void eval(EvalState & state, Env & env, Value & v); \
|
void eval(EvalState & state, Env & env, Value & v) override; \
|
||||||
void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env);
|
void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) override;
|
||||||
|
|
||||||
struct ExprInt : Expr
|
struct ExprInt : Expr
|
||||||
{
|
{
|
||||||
NixInt n;
|
NixInt n;
|
||||||
Value v;
|
Value v;
|
||||||
ExprInt(NixInt n) : n(n) { v.mkInt(n); };
|
ExprInt(NixInt n) : n(n) { v.mkInt(n); };
|
||||||
Value * maybeThunk(EvalState & state, Env & env);
|
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||||
COMMON_METHODS
|
COMMON_METHODS
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -168,7 +168,7 @@ struct ExprFloat : Expr
|
||||||
NixFloat nf;
|
NixFloat nf;
|
||||||
Value v;
|
Value v;
|
||||||
ExprFloat(NixFloat nf) : nf(nf) { v.mkFloat(nf); };
|
ExprFloat(NixFloat nf) : nf(nf) { v.mkFloat(nf); };
|
||||||
Value * maybeThunk(EvalState & state, Env & env);
|
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||||
COMMON_METHODS
|
COMMON_METHODS
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -177,7 +177,7 @@ struct ExprString : Expr
|
||||||
std::string s;
|
std::string s;
|
||||||
Value v;
|
Value v;
|
||||||
ExprString(std::string s) : s(std::move(s)) { v.mkString(this->s.data()); };
|
ExprString(std::string s) : s(std::move(s)) { v.mkString(this->s.data()); };
|
||||||
Value * maybeThunk(EvalState & state, Env & env);
|
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||||
COMMON_METHODS
|
COMMON_METHODS
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -186,7 +186,7 @@ struct ExprPath : Expr
|
||||||
std::string s;
|
std::string s;
|
||||||
Value v;
|
Value v;
|
||||||
ExprPath(std::string s) : s(std::move(s)) { v.mkPath(this->s.c_str()); };
|
ExprPath(std::string s) : s(std::move(s)) { v.mkPath(this->s.c_str()); };
|
||||||
Value * maybeThunk(EvalState & state, Env & env);
|
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||||
COMMON_METHODS
|
COMMON_METHODS
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -213,7 +213,7 @@ struct ExprVar : Expr
|
||||||
|
|
||||||
ExprVar(Symbol name) : name(name) { };
|
ExprVar(Symbol name) : name(name) { };
|
||||||
ExprVar(const PosIdx & pos, Symbol name) : pos(pos), name(name) { };
|
ExprVar(const PosIdx & pos, Symbol name) : pos(pos), name(name) { };
|
||||||
Value * maybeThunk(EvalState & state, Env & env);
|
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||||
PosIdx getPos() const override { return pos; }
|
PosIdx getPos() const override { return pos; }
|
||||||
COMMON_METHODS
|
COMMON_METHODS
|
||||||
};
|
};
|
||||||
|
@ -326,7 +326,7 @@ struct ExprLambda : Expr
|
||||||
: pos(pos), formals(formals), body(body)
|
: pos(pos), formals(formals), body(body)
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
void setName(Symbol name);
|
void setName(Symbol name) override;
|
||||||
std::string showNamePos(const EvalState & state) const;
|
std::string showNamePos(const EvalState & state) const;
|
||||||
inline bool hasFormals() const { return formals != nullptr; }
|
inline bool hasFormals() const { return formals != nullptr; }
|
||||||
PosIdx getPos() const override { return pos; }
|
PosIdx getPos() const override { return pos; }
|
||||||
|
@ -395,15 +395,15 @@ struct ExprOpNot : Expr
|
||||||
Expr * e1, * e2; \
|
Expr * e1, * e2; \
|
||||||
name(Expr * e1, Expr * e2) : e1(e1), e2(e2) { }; \
|
name(Expr * e1, Expr * e2) : e1(e1), e2(e2) { }; \
|
||||||
name(const PosIdx & pos, Expr * e1, Expr * e2) : pos(pos), e1(e1), e2(e2) { }; \
|
name(const PosIdx & pos, Expr * e1, Expr * e2) : pos(pos), e1(e1), e2(e2) { }; \
|
||||||
void show(const SymbolTable & symbols, std::ostream & str) const \
|
void show(const SymbolTable & symbols, std::ostream & str) const override \
|
||||||
{ \
|
{ \
|
||||||
str << "("; e1->show(symbols, str); str << " " s " "; e2->show(symbols, str); str << ")"; \
|
str << "("; e1->show(symbols, str); str << " " s " "; e2->show(symbols, str); str << ")"; \
|
||||||
} \
|
} \
|
||||||
void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) \
|
void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) override \
|
||||||
{ \
|
{ \
|
||||||
e1->bindVars(es, env); e2->bindVars(es, env); \
|
e1->bindVars(es, env); e2->bindVars(es, env); \
|
||||||
} \
|
} \
|
||||||
void eval(EvalState & state, Env & env, Value & v); \
|
void eval(EvalState & state, Env & env, Value & v) override; \
|
||||||
PosIdx getPos() const override { return pos; } \
|
PosIdx getPos() const override { return pos; } \
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@ -520,6 +520,12 @@ path_start
|
||||||
$$ = new ExprPath(path);
|
$$ = new ExprPath(path);
|
||||||
}
|
}
|
||||||
| HPATH {
|
| HPATH {
|
||||||
|
if (evalSettings.pureEval) {
|
||||||
|
throw Error(
|
||||||
|
"the path '%s' can not be resolved in pure mode",
|
||||||
|
std::string_view($1.p, $1.l)
|
||||||
|
);
|
||||||
|
}
|
||||||
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
|
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
|
||||||
$$ = new ExprPath(path);
|
$$ = new ExprPath(path);
|
||||||
}
|
}
|
||||||
|
|
|
@ -851,6 +851,18 @@ static RegisterPrimOp primop_floor({
|
||||||
static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||||
{
|
{
|
||||||
auto attrs = state.buildBindings(2);
|
auto attrs = state.buildBindings(2);
|
||||||
|
|
||||||
|
/* increment state.trylevel, and decrement it when this function returns. */
|
||||||
|
MaintainCount trylevel(state.trylevel);
|
||||||
|
|
||||||
|
void (* savedDebugRepl)(ref<EvalState> es, const ValMap & extraEnv) = nullptr;
|
||||||
|
if (state.debugRepl && evalSettings.ignoreExceptionsDuringTry)
|
||||||
|
{
|
||||||
|
/* to prevent starting the repl from exceptions withing a tryEval, null it. */
|
||||||
|
savedDebugRepl = state.debugRepl;
|
||||||
|
state.debugRepl = nullptr;
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
state.forceValue(*args[0], pos);
|
state.forceValue(*args[0], pos);
|
||||||
attrs.insert(state.sValue, args[0]);
|
attrs.insert(state.sValue, args[0]);
|
||||||
|
@ -859,6 +871,11 @@ static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Va
|
||||||
attrs.alloc(state.sValue).mkBool(false);
|
attrs.alloc(state.sValue).mkBool(false);
|
||||||
attrs.alloc("success").mkBool(false);
|
attrs.alloc("success").mkBool(false);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// restore the debugRepl pointer if we saved it earlier.
|
||||||
|
if (savedDebugRepl)
|
||||||
|
state.debugRepl = savedDebugRepl;
|
||||||
|
|
||||||
v.mkAttrs(attrs);
|
v.mkAttrs(attrs);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -970,6 +987,15 @@ static RegisterPrimOp primop_trace({
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
|
/* Takes two arguments and evaluates to the second one. Used as the
|
||||||
|
* builtins.traceVerbose implementation when --trace-verbose is not enabled
|
||||||
|
*/
|
||||||
|
static void prim_second(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||||
|
{
|
||||||
|
state.forceValue(*args[1], pos);
|
||||||
|
v = *args[1];
|
||||||
|
}
|
||||||
|
|
||||||
/*************************************************************
|
/*************************************************************
|
||||||
* Derivations
|
* Derivations
|
||||||
*************************************************************/
|
*************************************************************/
|
||||||
|
@ -3926,6 +3952,18 @@ void EvalState::createBaseEnv()
|
||||||
addPrimOp("__exec", 1, prim_exec);
|
addPrimOp("__exec", 1, prim_exec);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
addPrimOp({
|
||||||
|
.fun = evalSettings.traceVerbose ? prim_trace : prim_second,
|
||||||
|
.arity = 2,
|
||||||
|
.name = "__traceVerbose",
|
||||||
|
.args = { "e1", "e2" },
|
||||||
|
.doc = R"(
|
||||||
|
Evaluate *e1* and print its abstract syntax representation on standard
|
||||||
|
error if `--trace-verbose` is enabled. Then return *e2*. This function
|
||||||
|
is useful for debugging.
|
||||||
|
)",
|
||||||
|
});
|
||||||
|
|
||||||
/* Add a value containing the current Nix expression search path. */
|
/* Add a value containing the current Nix expression search path. */
|
||||||
mkList(v, searchPath.size());
|
mkList(v, searchPath.size());
|
||||||
int n = 0;
|
int n = 0;
|
||||||
|
|
|
@ -364,6 +364,10 @@ static RegisterPrimOp primop_fetchGit({
|
||||||
A Boolean parameter that specifies whether submodules should be
|
A Boolean parameter that specifies whether submodules should be
|
||||||
checked out. Defaults to `false`.
|
checked out. Defaults to `false`.
|
||||||
|
|
||||||
|
- shallow\
|
||||||
|
A Boolean parameter that specifies whether fetching a shallow clone
|
||||||
|
is allowed. Defaults to `false`.
|
||||||
|
|
||||||
- allRefs\
|
- allRefs\
|
||||||
Whether to fetch all refs of the repository. With this argument being
|
Whether to fetch all refs of the repository. With this argument being
|
||||||
true, it's possible to load a `rev` from *any* `ref` (by default only
|
true, it's possible to load a `rev` from *any* `ref` (by default only
|
||||||
|
|
|
@ -540,22 +540,22 @@ namespace nix {
|
||||||
ASSERT_THAT(v, IsStringEq(output));
|
ASSERT_THAT(v, IsStringEq(output));
|
||||||
}
|
}
|
||||||
|
|
||||||
#define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " #input), std::string_view(output)))
|
#define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " input), std::string_view(output)))
|
||||||
INSTANTIATE_TEST_SUITE_P(
|
INSTANTIATE_TEST_SUITE_P(
|
||||||
toString,
|
toString,
|
||||||
ToStringPrimOpTest,
|
ToStringPrimOpTest,
|
||||||
testing::Values(
|
testing::Values(
|
||||||
CASE("foo", "foo"),
|
CASE(R"("foo")", "foo"),
|
||||||
CASE(1, "1"),
|
CASE(R"(1)", "1"),
|
||||||
CASE([1 2 3], "1 2 3"),
|
CASE(R"([1 2 3])", "1 2 3"),
|
||||||
CASE(.123, "0.123000"),
|
CASE(R"(.123)", "0.123000"),
|
||||||
CASE(true, "1"),
|
CASE(R"(true)", "1"),
|
||||||
CASE(false, ""),
|
CASE(R"(false)", ""),
|
||||||
CASE(null, ""),
|
CASE(R"(null)", ""),
|
||||||
CASE({ v = "bar"; __toString = self: self.v; }, "bar"),
|
CASE(R"({ v = "bar"; __toString = self: self.v; })", "bar"),
|
||||||
CASE({ v = "bar"; __toString = self: self.v; outPath = "foo"; }, "bar"),
|
CASE(R"({ v = "bar"; __toString = self: self.v; outPath = "foo"; })", "bar"),
|
||||||
CASE({ outPath = "foo"; }, "foo"),
|
CASE(R"({ outPath = "foo"; })", "foo"),
|
||||||
CASE(./test, "/test")
|
CASE(R"(./test)", "/test")
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
#undef CASE
|
#undef CASE
|
||||||
|
|
|
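To make the `CASE` macro change concrete, here is an illustrative expansion that is not taken from the test file itself: because the Nix source text is now passed as a C++ raw string literal rather than a bare token sequence, adjacent string literal concatenation builds the full expression to evaluate, and inputs containing commas or quotes survive preprocessing intact.

    #include <string_view>
    #include <tuple>

    // CASE(R"([1 2 3])", "1 2 3") expands, after preprocessing, to roughly:
    auto example = std::make_tuple(
        std::string_view("builtins.toString " R"([1 2 3])"), // i.e. "builtins.toString [1 2 3]"
        std::string_view("1 2 3"));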
@@ -70,7 +70,7 @@ struct FetchSettings : public Config
     Setting<bool> warnDirty{this, true, "warn-dirty",
         "Whether to warn about dirty Git/Mercurial trees."};

-    Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry",
+    Setting<std::string> flakeRegistry{this, "https://channels.nixos.org/flake-registry.json", "flake-registry",
         "Path or URI of the global flake registry."};

     Setting<bool> useRegistries{this, true, "use-registries",

@@ -85,8 +85,9 @@ std::optional<std::string> readHead(const Path & path)
 bool storeCachedHead(const std::string& actualUrl, const std::string& headRef)
 {
     Path cacheDir = getCachePath(actualUrl);
+    auto gitDir = ".";
     try {
-        runProgram("git", true, { "-C", cacheDir, "symbolic-ref", "--", "HEAD", headRef });
+        runProgram("git", true, { "-C", cacheDir, "--git-dir", gitDir, "symbolic-ref", "--", "HEAD", headRef });
     } catch (ExecError &e) {
         if (!WIFEXITED(e.status)) throw;
         return false;

@@ -182,7 +183,7 @@ WorkdirInfo getWorkdirInfo(const Input & input, const Path & workdir)
     if (hasHead) {
         // Using git diff is preferrable over lower-level operations here,
         // because its conceptually simpler and we only need the exit code anyways.
-        auto gitDiffOpts = Strings({ "-C", workdir, "diff", "HEAD", "--quiet"});
+        auto gitDiffOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "diff", "HEAD", "--quiet"});
         if (!submodules) {
             // Changes in submodules should only make the tree dirty
             // when those submodules will be copied as well.

@@ -203,6 +204,7 @@ WorkdirInfo getWorkdirInfo(const Input & input, const Path & workdir)
 std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, const Path & workdir, const WorkdirInfo & workdirInfo)
 {
     const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
+    auto gitDir = ".git";

     if (!fetchSettings.allowDirty)
         throw Error("Git tree '%s' is dirty", workdir);

@@ -210,7 +212,7 @@ std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, co
     if (fetchSettings.warnDirty)
         warn("Git tree '%s' is dirty", workdir);

-    auto gitOpts = Strings({ "-C", workdir, "ls-files", "-z" });
+    auto gitOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "ls-files", "-z" });
     if (submodules)
         gitOpts.emplace_back("--recurse-submodules");

@@ -240,7 +242,7 @@ std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, co
     // modified dirty file?
     input.attrs.insert_or_assign(
         "lastModified",
-        workdirInfo.hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
+        workdirInfo.hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);

     return {std::move(storePath), input};
 }

@@ -572,7 +574,7 @@ struct GitInputScheme : InputScheme
             bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-parse", "--is-shallow-repository" })) == "true";

             if (isShallow && !shallow)
-                throw Error("'%s' is a shallow Git repository, but a non-shallow repository is needed", actualUrl);
+                throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified.", actualUrl);

             // FIXME: check whether rev is an ancestor of ref.

@@ -381,7 +381,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme

         Headers headers = makeHeadersWithAuthTokens(host);

-        std::string ref_uri;
+        std::string refUri;
         if (ref == "HEAD") {
             auto file = store->toRealPath(
                 downloadFile(store, fmt("%s/HEAD", base_url), "source", false, headers).storePath);

@@ -393,10 +393,11 @@ struct SourceHutInputScheme : GitArchiveInputScheme
             if (!remoteLine) {
                 throw BadURL("in '%d', couldn't resolve HEAD ref '%d'", input.to_string(), ref);
             }
-            ref_uri = remoteLine->target;
+            refUri = remoteLine->target;
         } else {
-            ref_uri = fmt("refs/(heads|tags)/%s", ref);
+            refUri = fmt("refs/(heads|tags)/%s", ref);
         }
+        std::regex refRegex(refUri);

         auto file = store->toRealPath(
             downloadFile(store, fmt("%s/info/refs", base_url), "source", false, headers).storePath);

@@ -406,7 +407,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
         std::optional<std::string> id;
         while(!id && getline(is, line)) {
             auto parsedLine = git::parseLsRemoteLine(line);
-            if (parsedLine && parsedLine->reference == ref_uri)
+            if (parsedLine && parsedLine->reference && std::regex_match(*parsedLine->reference, refRegex))
                 id = parsedLine->target;
         }

@@ -7,6 +7,22 @@ HookInstance::HookInstance()
 {
     debug("starting build hook '%s'", settings.buildHook);

+    auto buildHookArgs = tokenizeString<std::list<std::string>>(settings.buildHook.get());
+
+    if (buildHookArgs.empty())
+        throw Error("'build-hook' setting is empty");
+
+    auto buildHook = buildHookArgs.front();
+    buildHookArgs.pop_front();
+
+    Strings args;
+
+    for (auto & arg : buildHookArgs)
+        args.push_back(arg);
+
+    args.push_back(std::string(baseNameOf(settings.buildHook.get())));
+    args.push_back(std::to_string(verbosity));
+
     /* Create a pipe to get the output of the child. */
     fromHook.create();

@@ -36,14 +52,9 @@ HookInstance::HookInstance()
         if (dup2(builderOut.readSide.get(), 5) == -1)
             throw SysError("dupping builder's stdout/stderr");

-        Strings args = {
-            std::string(baseNameOf(settings.buildHook.get())),
-            std::to_string(verbosity),
-        };
-
-        execv(settings.buildHook.get().c_str(), stringsToCharPtrs(args).data());
-
-        throw SysError("executing '%s'", settings.buildHook);
+        execv(buildHook.c_str(), stringsToCharPtrs(args).data());
+
+        throw SysError("executing '%s'", buildHook);
     });

     pid.setSeparatePG(true);
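Because the `build-hook` setting is now tokenized rather than treated as a single path, it can carry a program name plus leading arguments. A small, hypothetical sketch of that parsing step follows; the setting value shown is just an example matching the fallback form of the default installed by `globals.cc` later in this commit, and the surrounding function is illustrative only.

    #include <list>
    #include <string>
    #include "util.hh"   // assumed include for nix::tokenizeString

    void parseBuildHookSetting()
    {
        std::string setting = "nix __build-remote";                          // hypothetical value
        auto words = nix::tokenizeString<std::list<std::string>>(setting);   // split on whitespace
        auto program = words.front();                                        // "nix" is what gets exec'd
        words.pop_front();                                                   // the rest become extra argv entries
    }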
@@ -1717,6 +1717,18 @@ void LocalDerivationGoal::runChild()

             for (auto & i : dirsInChroot) {
                 if (i.second.source == "/proc") continue; // backwards compatibility

+                #if HAVE_EMBEDDED_SANDBOX_SHELL
+                if (i.second.source == "__embedded_sandbox_shell__") {
+                    static unsigned char sh[] = {
+                        #include "embedded-sandbox-shell.gen.hh"
+                    };
+                    auto dst = chrootRootDir + i.first;
+                    createDirs(dirOf(dst));
+                    writeFile(dst, std::string_view((const char *) sh, sizeof(sh)));
+                    chmod_(dst, 0555);
+                } else
+                #endif
                 doBind(i.second.source, chrootRootDir + i.first, i.second.optional);
             }

@@ -154,7 +154,7 @@ void PathSubstitutionGoal::tryNext()
        only after we've downloaded the path. */
     if (!sub->isTrusted && worker.store.pathInfoIsUntrusted(*info))
     {
-        warn("the substitute for '%s' from '%s' is not signed by any of the keys in 'trusted-public-keys'",
+        warn("ignoring substitute for '%s' from '%s', as it's not signed by any of the keys in 'trusted-public-keys'",
            worker.store.printStorePath(storePath), sub->getUri());
         tryNext();
         return;

@@ -135,6 +135,7 @@ void LocalStore::addTempRoot(const StorePath & path)
                 state->fdRootsSocket.close();
                 goto restart;
             }
+            throw;
         }
     }

@@ -153,6 +154,7 @@ void LocalStore::addTempRoot(const StorePath & path)
                 state->fdRootsSocket.close();
                 goto restart;
             }
+            throw;
         } catch (EndOfFile & e) {
             debug("GC socket disconnected");
             state->fdRootsSocket.close();

@@ -36,7 +36,6 @@ Settings::Settings()
     , nixStateDir(canonPath(getEnv("NIX_STATE_DIR").value_or(NIX_STATE_DIR)))
     , nixConfDir(canonPath(getEnv("NIX_CONF_DIR").value_or(NIX_CONF_DIR)))
     , nixUserConfFiles(getUserConfigFiles())
-    , nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR)))
     , nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR)))
     , nixManDir(canonPath(NIX_MAN_DIR))
     , nixDaemonSocketFile(canonPath(getEnv("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH)))

@@ -67,12 +66,13 @@ Settings::Settings()
     sandboxPaths = tokenizeString<StringSet>("/bin/sh=" SANDBOX_SHELL);
 #endif

-/* chroot-like behavior from Apple's sandbox */
+    /* chroot-like behavior from Apple's sandbox */
 #if __APPLE__
     sandboxPaths = tokenizeString<StringSet>("/System/Library/Frameworks /System/Library/PrivateFrameworks /bin/sh /bin/bash /private/tmp /private/var/tmp /usr/lib");
     allowedImpureHostPrefixes = tokenizeString<StringSet>("/System/Library /usr/lib /dev /bin/sh");
 #endif

+    buildHook = getSelfExe().value_or("nix") + " __build-remote";
 }

 void loadConfFile()

@@ -79,9 +79,6 @@ public:
     /* A list of user configuration files to load. */
     std::vector<Path> nixUserConfFiles;

-    /* The directory where internal helper programs are stored. */
-    Path nixLibexecDir;
-
     /* The directory where the main programs are stored. */
     Path nixBinDir;

@@ -195,7 +192,7 @@ public:
     )",
        {"build-timeout"}};

-    PathSetting buildHook{this, true, nixLibexecDir + "/nix/build-remote", "build-hook",
+    PathSetting buildHook{this, true, "", "build-hook",
        "The path of the helper program that executes builds to remote machines."};

     Setting<std::string> builders{

@@ -802,7 +799,7 @@ public:
     )"};

     Setting<StringSet> ignoredAcls{
-        this, {"security.selinux", "system.nfs4_acl"}, "ignored-acls",
+        this, {"security.selinux", "system.nfs4_acl", "security.csm"}, "ignored-acls",
         R"(
           A list of ACLs that should be ignored, normally Nix attempts to
           remove all ACLs from files and directories in the Nix store, but

@@ -69,6 +69,7 @@ protected:
         } catch (SysError & e) {
             if (e.errNo == ENOENT)
                 throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache", path);
+            throw;
         }
     }

@@ -39,14 +39,23 @@ libstore_CXXFLAGS += \
  -DNIX_STATE_DIR=\"$(localstatedir)/nix\" \
  -DNIX_LOG_DIR=\"$(localstatedir)/log/nix\" \
  -DNIX_CONF_DIR=\"$(sysconfdir)/nix\" \
- -DNIX_LIBEXEC_DIR=\"$(libexecdir)\" \
  -DNIX_BIN_DIR=\"$(bindir)\" \
  -DNIX_MAN_DIR=\"$(mandir)\" \
  -DLSOF=\"$(lsof)\"

+ifeq ($(embedded_sandbox_shell),yes)
+libstore_CXXFLAGS += -DSANDBOX_SHELL=\"__embedded_sandbox_shell__\"
+
+$(d)/build/local-derivation-goal.cc: $(d)/embedded-sandbox-shell.gen.hh
+
+$(d)/embedded-sandbox-shell.gen.hh: $(sandbox_shell)
+	$(trace-gen) hexdump -v -e '1/1 "0x%x," "\n"' < $< > $@.tmp
+	@mv $@.tmp $@
+else
 ifneq ($(sandbox_shell),)
 libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\""
 endif
+endif

 $(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh

@@ -67,13 +67,26 @@ bool UserLock::findFreeUser() {
 #if __linux__
     /* Get the list of supplementary groups of this build user. This
        is usually either empty or contains a group such as "kvm". */
-    supplementaryGIDs.resize(10);
-    int ngroups = supplementaryGIDs.size();
-    int err = getgrouplist(pw->pw_name, pw->pw_gid,
-        supplementaryGIDs.data(), &ngroups);
-    if (err == -1)
-        throw Error("failed to get list of supplementary groups for '%1%'", pw->pw_name);
+    int ngroups = 32; // arbitrary initial guess
+    supplementaryGIDs.resize(ngroups);
+
+    int err = getgrouplist(pw->pw_name, pw->pw_gid, supplementaryGIDs.data(),
+        &ngroups);
+
+    // Our initial size of 32 wasn't sufficient, the correct size has
+    // been stored in ngroups, so we try again.
+    if (err == -1) {
+        supplementaryGIDs.resize(ngroups);
+        err = getgrouplist(pw->pw_name, pw->pw_gid, supplementaryGIDs.data(),
+            &ngroups);
+    }
+
+    // If it failed once more, then something must be broken.
+    if (err == -1)
+        throw Error("failed to get list of supplementary groups for '%1%'",
+            pw->pw_name);
+
+    // Finally, trim back the GID list to its real size
     supplementaryGIDs.resize(ngroups);
 #endif
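For readers unfamiliar with the `getgrouplist(3)` contract used above: the call reports failure when the supplied buffer is too small and writes the required size back through `ngroups`, which is why the new code retries exactly once with the reported size. Below is a self-contained sketch of the same two-call pattern, with hypothetical names and error handling reduced to a boolean; it is not the code from `lock.cc`.

    #include <grp.h>
    #include <pwd.h>
    #include <sys/types.h>
    #include <vector>

    static bool getSupplementaryGroups(const struct passwd * pw, std::vector<gid_t> & gids)
    {
        int ngroups = 32;            // arbitrary initial guess
        gids.resize(ngroups);
        int err = getgrouplist(pw->pw_name, pw->pw_gid, gids.data(), &ngroups);
        if (err == -1) {
            // ngroups now holds the required size; retry once with that size.
            gids.resize(ngroups);
            err = getgrouplist(pw->pw_name, pw->pw_gid, gids.data(), &ngroups);
        }
        if (err == -1) return false; // still failing: give up
        gids.resize(ngroups);        // trim to the real number of groups
        return true;
    }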
@@ -62,6 +62,9 @@ public:
     /* How often to purge expired entries from the cache. */
     const int purgeInterval = 24 * 3600;

+    /* How long to cache binary cache info (i.e. /nix-cache-info) */
+    const int cacheInfoTtl = 7 * 24 * 3600;
+
     struct Cache
     {
         int id;

@@ -98,7 +101,7 @@ public:
            "insert or replace into BinaryCaches(url, timestamp, storeDir, wantMassQuery, priority) values (?, ?, ?, ?, ?)");

         state->queryCache.create(state->db,
-            "select id, storeDir, wantMassQuery, priority from BinaryCaches where url = ?");
+            "select id, storeDir, wantMassQuery, priority from BinaryCaches where url = ? and timestamp > ?");

         state->insertNAR.create(state->db,
            "insert or replace into NARs(cache, hashPart, namePart, url, compression, fileHash, fileSize, narHash, "

@@ -183,7 +186,7 @@ public:

         auto i = state->caches.find(uri);
         if (i == state->caches.end()) {
-            auto queryCache(state->queryCache.use()(uri));
+            auto queryCache(state->queryCache.use()(uri)(time(0) - cacheInfoTtl));
             if (!queryCache.next())
                 return std::nullopt;
             state->caches.emplace(uri,

@@ -69,8 +69,6 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
            if (value != "unknown-deriver")
                deriver = StorePath(value);
         }
-        else if (name == "System")
-            system = value;
         else if (name == "Sig")
            sigs.insert(value);
         else if (name == "CA") {

@@ -106,9 +104,6 @@ std::string NarInfo::to_string(const Store & store) const
     if (deriver)
         res += "Deriver: " + std::string(deriver->to_string()) + "\n";

-    if (!system.empty())
-        res += "System: " + system + "\n";
-
     for (auto sig : sigs)
         res += "Sig: " + sig + "\n";

@@ -14,7 +14,6 @@ struct NarInfo : ValidPathInfo
     std::string compression;
     std::optional<Hash> fileHash;
     uint64_t fileSize = 0;
-    std::string system;

     NarInfo() = delete;
     NarInfo(StorePath && path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { }

@@ -1,7 +1,7 @@
 create table if not exists ValidPaths (
     id integer primary key autoincrement not null,
     path text unique not null,
-    hash text not null,
+    hash text not null, -- base16 representation
     registrationTime integer not null,
     deriver text,
     narSize integer,

@@ -1302,7 +1302,8 @@ std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri_
     return {uri, params};
 }

-static bool isNonUriPath(const std::string & spec) {
+static bool isNonUriPath(const std::string & spec)
+{
     return
         // is not a URL
         spec.find("://") == std::string::npos

@@ -1319,6 +1320,26 @@ std::shared_ptr<Store> openFromNonUri(const std::string & uri, const Store::Para
            return std::make_shared<LocalStore>(params);
         else if (pathExists(settings.nixDaemonSocketFile))
            return std::make_shared<UDSRemoteStore>(params);
+        #if __linux__
+        else if (!pathExists(stateDir) && params.empty() && getuid() != 0 && !getEnv("NIX_STORE_DIR").has_value()) {
+            /* If /nix doesn't exist, there is no daemon socket, and
+               we're not root, then automatically set up a chroot
+               store in ~/.local/share/nix/root. */
+            auto chrootStore = getDataDir() + "/nix/root";
+            if (!pathExists(chrootStore)) {
+                try {
+                    createDirs(chrootStore);
+                } catch (Error & e) {
+                    return std::make_shared<LocalStore>(params);
+                }
+                warn("'/nix' does not exist, so Nix will use '%s' as a chroot store", chrootStore);
+            } else
+                debug("'/nix' does not exist, so Nix will use '%s' as a chroot store", chrootStore);
+            Store::Params params2;
+            params2["root"] = chrootStore;
+            return std::make_shared<LocalStore>(params2);
+        }
+        #endif
         else
            return std::make_shared<LocalStore>(params);
     } else if (uri == "daemon") {

@@ -124,7 +124,7 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
         bool anyCompleted = false;
         for (size_t n = 0 ; n < flag.handler.arity; ++n) {
            if (pos == end) {
-                if (flag.handler.arity == ArityAny) break;
+                if (flag.handler.arity == ArityAny || anyCompleted) break;
                throw UsageError("flag '%s' requires %d argument(s)", name, flag.handler.arity);
            }
            if (auto prefix = needsCompletion(*pos)) {

@@ -362,6 +362,14 @@ bool MultiCommand::processArgs(const Strings & args, bool finish)
         return Args::processArgs(args, finish);
 }

+void MultiCommand::completionHook()
+{
+    if (command)
+        return command->second->completionHook();
+    else
+        return Args::completionHook();
+}
+
 nlohmann::json MultiCommand::toJSON()
 {
     auto cmds = nlohmann::json::object();

@@ -25,6 +25,8 @@ public:
     /* Return a short one-line description of the command. */
     virtual std::string description() { return ""; }

+    virtual bool forceImpureByDefault() { return false; }
+
     /* Return documentation about this command, in Markdown format. */
     virtual std::string doc() { return ""; }

@@ -146,6 +148,11 @@ protected:
       argument (if any) have been processed. */
     virtual void initialFlagsProcessed() {}

+    /* Called after the command line has been processed if we need to generate
+       completions. Useful for commands that need to know the whole command line
+       in order to know what completions to generate. */
+    virtual void completionHook() { }
+
 public:

     void addFlag(Flag && flag);

@@ -221,6 +228,8 @@ public:

     bool processArgs(const Strings & args, bool finish) override;

+    void completionHook() override;
+
     nlohmann::json toJSON() override;
 };
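As a hypothetical illustration of the new hook (this command does not exist in the commit): a command can override `completionHook()` to emit completions only after the full command line has been parsed, which is what the flake commands rely on for completing flake URLs. The base class and header name are assumed from this diff and the surrounding codebase.

    #include "command.hh"   // assumed location of the Command base class

    struct ExampleCommand : nix::Command
    {
        std::string target;

        void completionHook() override
        {
            // All flags and positional arguments have been parsed at this point,
            // so completions can depend on `target` and other parsed state.
        }

        void run() override { }
    };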
@@ -13,6 +13,7 @@ std::map<ExperimentalFeature, std::string> stringifiedXpFeatures = {
     { Xp::RecursiveNix, "recursive-nix" },
     { Xp::NoUrlLiterals, "no-url-literals" },
     { Xp::FetchClosure, "fetch-closure" },
+    { Xp::ReplFlake, "repl-flake" },
     { Xp::ComputedDerivations, "computed-derivations" },
 };

@@ -22,6 +22,7 @@ enum struct ExperimentalFeature
     RecursiveNix,
     NoUrlLiterals,
     FetchClosure,
+    ReplFlake,
     ComputedDerivations, // RFC 92
 };

@@ -8,9 +8,9 @@ std::string hiliteMatches(
     std::string_view prefix,
     std::string_view postfix)
 {
-    // Avoid copy on zero matches
+    // Avoid extra work on zero matches
     if (matches.size() == 0)
-        return (std::string) s;
+        return std::string(s);

     std::sort(matches.begin(), matches.end(), [](const auto & a, const auto & b) {
         return a.position() < b.position();

@@ -29,6 +29,7 @@

 #ifdef __APPLE__
 #include <sys/syscall.h>
+#include <mach-o/dyld.h>
 #endif

 #ifdef __linux__

@@ -574,6 +575,20 @@ Path getHome()
     static Path homeDir = []()
     {
         auto homeDir = getEnv("HOME");
+        if (homeDir) {
+            // Only use $HOME if doesn't exist or is owned by the current user.
+            struct stat st;
+            int result = stat(homeDir->c_str(), &st);
+            if (result != 0) {
+                if (errno != ENOENT) {
+                    warn("couldn't stat $HOME ('%s') for reason other than not existing ('%d'), falling back to the one defined in the 'passwd' file", *homeDir, errno);
+                    homeDir.reset();
+                }
+            } else if (st.st_uid != geteuid()) {
+                warn("$HOME ('%s') is not owned by you, falling back to the one defined in the 'passwd' file", *homeDir);
+                homeDir.reset();
+            }
+        }
         if (!homeDir) {
             std::vector<char> buf(16384);
             struct passwd pwbuf;

@@ -619,6 +634,27 @@ Path getDataDir()
 }

+
+std::optional<Path> getSelfExe()
+{
+    static auto cached = []() -> std::optional<Path>
+    {
+        #if __linux__
+        return readLink("/proc/self/exe");
+        #elif __APPLE__
+        char buf[1024];
+        uint32_t size = sizeof(buf);
+        if (_NSGetExecutablePath(buf, &size) == 0)
+            return buf;
+        else
+            return std::nullopt;
+        #else
+        return std::nullopt;
+        #endif
+    }();
+    return cached;
+}
+
 Paths createDirs(const Path & path)
 {
     Paths created;
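A hypothetical caller of the new helper, to make its contract concrete: `getSelfExe()` returns the running binary's path where the platform supports it (Linux via `/proc/self/exe`, macOS via `_NSGetExecutablePath`) and `std::nullopt` elsewhere, so callers need a fallback, as `globals.cc` does with `value_or("nix")`. The include and function below are illustrative only.

    #include <iostream>
    #include "util.hh"   // assumed include that declares nix::getSelfExe()

    void reportSelfExe()
    {
        if (auto exe = nix::getSelfExe())
            std::cerr << "running from " << *exe << "\n";
        else
            std::cerr << "executable path not available on this platform\n";
    }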
@@ -149,10 +149,14 @@ std::vector<Path> getConfigDirs();
 /* Return $XDG_DATA_HOME or $HOME/.local/share. */
 Path getDataDir();

+/* Return the path of the current executable. */
+std::optional<Path> getSelfExe();
+
 /* Create a directory and all its parents, if necessary. Returns the
    list of created directories, in order of creation. */
 Paths createDirs(const Path & path);
-inline Paths createDirs(PathView path) {
+inline Paths createDirs(PathView path)
+{
     return createDirs(Path(path));
 }

@@ -700,4 +704,19 @@ template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
 std::string showBytes(uint64_t bytes);

+
+/* Provide an addition operator between strings and string_views
+   inexplicably omitted from the standard library. */
+inline std::string operator + (const std::string & s1, std::string_view s2)
+{
+    auto s = s1;
+    s.append(s2);
+    return s;
+}
+
+inline std::string operator + (std::string && s, std::string_view s2)
+{
+    s.append(s2);
+    return std::move(s);
+}
+
 }
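The comment above is the whole motivation: the standard library defines no `operator+` mixing `std::string` and `std::string_view`, so code like the sketch below only compiles because of these overloads. The overloads live in the `nix` namespace, so this sketch assumes it is compiled inside that namespace; the function name is illustrative.

    #include <string>
    #include <string_view>

    std::string joinExample()
    {
        std::string dir = "/nix/store";
        std::string_view suffix = "/example";
        return dir + suffix;   // picks operator+(const std::string &, std::string_view)
    }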
@ -257,11 +257,12 @@ static void main_nix_build(int argc, char * * argv)
|
||||||
|
|
||||||
auto autoArgs = myArgs.getAutoArgs(*state);
|
auto autoArgs = myArgs.getAutoArgs(*state);
|
||||||
|
|
||||||
|
auto autoArgsWithInNixShell = autoArgs;
|
||||||
if (runEnv) {
|
if (runEnv) {
|
||||||
auto newArgs = state->buildBindings(autoArgs->size() + 1);
|
auto newArgs = state->buildBindings(autoArgsWithInNixShell->size() + 1);
|
||||||
newArgs.alloc("inNixShell").mkBool(true);
|
newArgs.alloc("inNixShell").mkBool(true);
|
||||||
for (auto & i : *autoArgs) newArgs.insert(i);
|
for (auto & i : *autoArgs) newArgs.insert(i);
|
||||||
autoArgs = newArgs.finish();
|
autoArgsWithInNixShell = newArgs.finish();
|
||||||
}
|
}
|
||||||
|
|
||||||
if (packages) {
|
if (packages) {
|
||||||
|
@ -316,10 +317,39 @@ static void main_nix_build(int argc, char * * argv)
|
||||||
Value vRoot;
|
Value vRoot;
|
||||||
state->eval(e, vRoot);
|
state->eval(e, vRoot);
|
||||||
|
|
||||||
|
std::function<bool(const Value & v)> takesNixShellAttr;
|
||||||
|
takesNixShellAttr = [&](const Value & v) {
|
||||||
|
if (!runEnv) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
bool add = false;
|
||||||
|
if (v.type() == nFunction && v.lambda.fun->hasFormals()) {
|
||||||
|
for (auto & i : v.lambda.fun->formals->formals) {
|
||||||
|
if (state->symbols[i.name] == "inNixShell") {
|
||||||
|
add = true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return add;
|
||||||
|
};
|
||||||
|
|
||||||
for (auto & i : attrPaths) {
|
for (auto & i : attrPaths) {
|
||||||
Value & v(*findAlongAttrPath(*state, i, *autoArgs, vRoot).first);
|
Value & v(*findAlongAttrPath(
|
||||||
|
*state,
|
||||||
|
i,
|
||||||
|
takesNixShellAttr(vRoot) ? *autoArgsWithInNixShell : *autoArgs,
|
||||||
|
vRoot
|
||||||
|
).first);
|
||||||
state->forceValue(v, [&]() { return v.determinePos(noPos); });
|
state->forceValue(v, [&]() { return v.determinePos(noPos); });
|
||||||
getDerivations(*state, v, "", *autoArgs, drvs, false);
|
getDerivations(
|
||||||
|
*state,
|
||||||
|
v,
|
||||||
|
"",
|
||||||
|
takesNixShellAttr(v) ? *autoArgsWithInNixShell : *autoArgs,
|
||||||
|
drvs,
|
||||||
|
false
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -543,6 +573,8 @@ static void main_nix_build(int argc, char * * argv)
|
||||||
|
|
||||||
restoreProcessContext();
|
restoreProcessContext();
|
||||||
|
|
||||||
|
logger->stop();
|
||||||
|
|
||||||
execvp(shell->c_str(), argPtrs.data());
|
execvp(shell->c_str(), argPtrs.data());
|
||||||
|
|
||||||
throw SysError("executing shell '%s'", *shell);
|
throw SysError("executing shell '%s'", *shell);
|
||||||
|
@ -601,6 +633,8 @@ static void main_nix_build(int argc, char * * argv)
|
||||||
outPaths.push_back(outputPath);
|
outPaths.push_back(outputPath);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
logger->stop();
|
||||||
|
|
||||||
for (auto & path : outPaths)
|
for (auto & path : outPaths)
|
||||||
std::cout << store->printStorePath(path) << '\n';
|
std::cout << store->printStorePath(path) << '\n';
|
||||||
}
|
}
|
||||||
|
|
|
@ -37,6 +37,7 @@ void removeOldGenerations(std::string dir)
|
||||||
link = readLink(path);
|
link = readLink(path);
|
||||||
} catch (SysError & e) {
|
} catch (SysError & e) {
|
||||||
if (e.errNo == ENOENT) continue;
|
if (e.errNo == ENOENT) continue;
|
||||||
|
throw;
|
||||||
}
|
}
|
||||||
if (link.find("link") != std::string::npos) {
|
if (link.find("link") != std::string::npos) {
|
||||||
printInfo(format("removing old generations of profile %1%") % path);
|
printInfo(format("removing old generations of profile %1%") % path);
|
||||||
|
|
|
@ -1485,7 +1485,7 @@ static int main_nix_env(int argc, char * * argv)
|
||||||
if (globals.profile == "")
|
if (globals.profile == "")
|
||||||
globals.profile = getDefaultProfile();
|
globals.profile = getDefaultProfile();
|
||||||
|
|
||||||
op(globals, opFlags, opArgs);
|
op(globals, std::move(opFlags), std::move(opArgs));
|
||||||
|
|
||||||
globals.state->printStats();
|
globals.state->printStats();
|
||||||
|
|
||||||
|
|
|
@ -1093,7 +1093,7 @@ static int main_nix_store(int argc, char * * argv)
|
||||||
if (op != opDump && op != opRestore) /* !!! hack */
|
if (op != opDump && op != opRestore) /* !!! hack */
|
||||||
store = openStore();
|
store = openStore();
|
||||||
|
|
||||||
op(opFlags, opArgs);
|
op(std::move(opFlags), std::move(opArgs));
|
||||||
|
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
|
@ -276,15 +276,25 @@ struct Common : InstallableCommand, MixProfile
|
||||||
const BuildEnvironment & buildEnvironment,
|
const BuildEnvironment & buildEnvironment,
|
||||||
const Path & outputsDir = absPath(".") + "/outputs")
|
const Path & outputsDir = absPath(".") + "/outputs")
|
||||||
{
|
{
|
||||||
|
// A list of colon-separated environment variables that should be
|
||||||
|
// prepended to, rather than overwritten, in order to keep the shell usable.
|
||||||
|
// Please keep this list minimal in order to avoid impurities.
|
||||||
|
static const char * const savedVars[] = {
|
||||||
|
"PATH", // for commands
|
||||||
|
"XDG_DATA_DIRS", // for loadable completion
|
||||||
|
};
|
||||||
|
|
||||||
std::ostringstream out;
|
std::ostringstream out;
|
||||||
|
|
||||||
out << "unset shellHook\n";
|
out << "unset shellHook\n";
|
||||||
|
|
||||||
out << "nix_saved_PATH=\"$PATH\"\n";
|
for (auto & var : savedVars)
|
||||||
|
out << fmt("nix_saved_%s=\"$%s\"\n", var, var);
|
||||||
|
|
||||||
buildEnvironment.toBash(out, ignoreVars);
|
buildEnvironment.toBash(out, ignoreVars);
|
||||||
|
|
||||||
out << "PATH=\"$PATH:$nix_saved_PATH\"\n";
|
for (auto & var : savedVars)
|
||||||
|
out << fmt("%s=\"$%s:$nix_saved_%s\"\n", var, var, var);
|
||||||
|
|
||||||
out << "export NIX_BUILD_TOP=\"$(mktemp -d -t nix-shell.XXXXXX)\"\n";
|
out << "export NIX_BUILD_TOP=\"$(mktemp -d -t nix-shell.XXXXXX)\"\n";
|
||||||
for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"})
|
for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"})
|
||||||
|
|
|
@ -50,9 +50,9 @@ public:
|
||||||
return flake::lockFlake(*getEvalState(), getFlakeRef(), lockFlags);
|
return flake::lockFlake(*getEvalState(), getFlakeRef(), lockFlags);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::optional<FlakeRef> getFlakeRefForCompletion() override
|
std::vector<std::string> getFlakesForCompletion() override
|
||||||
{
|
{
|
||||||
return getFlakeRef();
|
return {flakeUrl};
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -740,7 +740,8 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
                "If you've set '%s' to a string, try using a path instead.",
                templateDir, templateDirAttr->getAttrPathStr());

        std::vector<Path> files;
        std::vector<Path> changedFiles;
        std::vector<Path> conflictedFiles;

        std::function<void(const Path & from, const Path & to)> copyDir;
        copyDir = [&](const Path & from, const Path & to)

@ -757,31 +758,41 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
                    auto contents = readFile(from2);
                    if (pathExists(to2)) {
                        auto contents2 = readFile(to2);
                        if (contents != contents2)
                        if (contents != contents2) {
                            throw Error("refusing to overwrite existing file '%s'", to2);
                            printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2, from2);
                            conflictedFiles.push_back(to2);
                        } else {
                            notice("skipping identical file: %s", from2);
                        }
                        continue;
                    } else
                        writeFile(to2, contents);
                }
                else if (S_ISLNK(st.st_mode)) {
                    auto target = readLink(from2);
                    if (pathExists(to2)) {
                        if (readLink(to2) != target)
                        if (readLink(to2) != target) {
                            throw Error("refusing to overwrite existing symlink '%s'", to2);
                            printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2, from2);
                            conflictedFiles.push_back(to2);
                        } else {
                            notice("skipping identical file: %s", from2);
                        }
                        continue;
                    } else
                        createSymlink(target, to2);
                }
                else
                    throw Error("file '%s' has unsupported type", from2);
                files.push_back(to2);
                changedFiles.push_back(to2);
                notice("wrote: %s", to2);
            }
        };

        copyDir(templateDir, flakeDir);

        if (pathExists(flakeDir + "/.git")) {
        if (!changedFiles.empty() && pathExists(flakeDir + "/.git")) {
            Strings args = { "-C", flakeDir, "add", "--intent-to-add", "--force", "--" };
            for (auto & s : files) args.push_back(s);
            for (auto & s : changedFiles) args.push_back(s);
            runProgram("git", true, args);
        }
        auto welcomeText = cursor->maybeGetAttr("welcomeText");

@ -789,6 +800,9 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
            notice("\n");
            notice(renderMarkdownToTerminal(welcomeText->getString()));
        }

        if (!conflictedFiles.empty())
            throw Error("Encountered %d conflicts - see above", conflictedFiles.size());
    }
};
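In effect, `nix flake init` now reports every clash and only fails at the end, instead of aborting on the first conflicting file. A hypothetical session against a template that ships a file already present in the target directory might look roughly like this (messages paraphrased from the strings added above, not captured from a real run; paths are placeholders):

```console
$ nix flake init -t templates#trivial
error: refusing to overwrite existing file '/home/alice/project/a'
       please merge it manually with '/nix/store/...-source/trivial/a'
wrote: /home/alice/project/flake.nix
error: Encountered 1 conflicts - see above
```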
@ -30,7 +30,7 @@ convert-secret-to-public` to get the corresponding public key for
verifying signed store paths.

The mandatory argument `--key-name` specifies a key name (such as
`cache.example.org-1). It is used to look up keys on the client when
`cache.example.org-1`). It is used to look up keys on the client when
it verifies signatures. It can be anything, but it’s suggested to use
the host name of your cache (e.g. `cache.example.org`) with a suffix
denoting the number of the key (to be incremented every time you need
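To make the naming convention concrete, a typical workflow is to generate a secret key with such a name, derive the public key for clients, and sign store paths with it. This is only an illustrative sketch: the file names and the store path are placeholders, and the exact flags may differ between Nix versions.

```console
# Generate a secret key whose name follows the host-plus-counter convention.
$ nix key generate-secret --key-name cache.example.org-1 > secret.key

# Derive the public key that clients add to `trusted-public-keys`.
$ nix key convert-secret-to-public < secret.key > public.key

# Sign a store path (and its closure) with the secret key.
$ nix store sign --key-file secret.key --recursive /nix/store/...-hello-2.12
```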
@ -266,6 +266,11 @@ void mainWrapped(int argc, char * * argv)
    programPath = argv[0];
    auto programName = std::string(baseNameOf(programPath));

    if (argc > 0 && std::string_view(argv[0]) == "__build-remote") {
        programName = "build-remote";
        argv++; argc--;
    }

    {
        auto legacy = (*RegisterLegacyCommand::commands)[programName];
        if (legacy) return legacy(argc, argv);

@ -342,7 +347,10 @@ void mainWrapped(int argc, char * * argv)
        if (!completions) throw;
    }

    if (completions) return;
    if (completions) {
        args.completionHook();
        return;
    }

    if (args.showVersion) {
        printVersion(programName);

@ -380,6 +388,9 @@ void mainWrapped(int argc, char * * argv)
        settings.ttlPositiveNarInfoCache = 0;
    }

    if (args.command->second->forceImpureByDefault() && !evalSettings.pureEval.overridden) {
        evalSettings.pureEval = false;
    }
    args.command->second->prepare();
    args.command->second->run();
}
@ -29,7 +29,7 @@ highest precedence:
   can be specified using the NixOS option `nix.registry`.

 * The user registry `~/.config/nix/registry.json`. This registry can
   be modified by commands such as `nix flake pin`.
   be modified by commands such as `nix registry pin`.

 * Overrides specified on the command line using the option
   `--override-flake`.
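As an illustration of how these layers interact (the registry entry and flake URL below are placeholders), pinning writes into the user registry, while `--override-flake` only affects a single invocation:

```console
# Pin the `nixpkgs` entry in ~/.config/nix/registry.json to its current revision.
$ nix registry pin nixpkgs

# Inspect the combined registry (system, user and command-line layers).
$ nix registry list

# Override an entry for one command only, without touching any registry file.
$ nix build --override-flake nixpkgs github:NixOS/nixpkgs/nixos-22.05 nixpkgs#hello
```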
@ -24,10 +24,34 @@ R""(
* Interact with Nixpkgs in the REPL:

  ```console
  # nix repl '<nixpkgs>'
  # nix repl --file example.nix
  Loading Installable ''...
  Added 3 variables.

  Loading '<nixpkgs>'...
  # nix repl --expr '{a={b=3;c=4;};}'
  Added 12428 variables.
  Loading Installable ''...
  Added 1 variables.

  # nix repl --expr '{a={b=3;c=4;};}' a
  Loading Installable ''...
  Added 1 variables.

  # nix repl --extra_experimental_features 'flakes repl-flake' nixpkgs
  Loading Installable 'flake:nixpkgs#'...
  Added 5 variables.

  nix-repl> legacyPackages.x86_64-linux.emacs.name
  "emacs-27.1"

  nix-repl> legacyPackages.x86_64-linux.emacs.name
  "emacs-27.1"

  nix-repl> :q

  # nix repl --expr 'import <nixpkgs>{}'

  Loading Installable ''...
  Added 12439 variables.

  nix-repl> emacs.name
  "emacs-27.1"
@ -47,7 +47,7 @@ void runProgramInStore(ref<Store> store,
    Strings helperArgs = { chrootHelperName, store->storeDir, store2->getRealStoreDir(), program };
    for (auto & arg : args) helperArgs.push_back(arg);

    execv(readLink("/proc/self/exe").c_str(), stringsToCharPtrs(helperArgs).data());
    execv(getSelfExe().value_or("nix").c_str(), stringsToCharPtrs(helperArgs).data());

    throw SysError("could not execute chroot helper");
}
@ -18,16 +18,26 @@ using namespace nix;

std::string wrap(std::string prefix, std::string s)
{
    return prefix + s + ANSI_NORMAL;
    return concatStrings(prefix, s, ANSI_NORMAL);
}

struct CmdSearch : InstallableCommand, MixJSON
{
    std::vector<std::string> res;
    std::vector<std::string> excludeRes;

    CmdSearch()
    {
        expectArgs("regex", &res);
        addFlag(Flag {
            .longName = "exclude",
            .shortName = 'e',
            .description = "Hide packages whose attribute path, name or description contain *regex*.",
            .labels = {"regex"},
            .handler = {[this](std::string s) {
                excludeRes.push_back(s);
            }},
        });
    }

    std::string description() override

@ -62,11 +72,16 @@ struct CmdSearch : InstallableCommand, MixJSON
        res.push_back("^");

        std::vector<std::regex> regexes;
        std::vector<std::regex> excludeRegexes;
        regexes.reserve(res.size());
        excludeRegexes.reserve(excludeRes.size());

        for (auto & re : res)
            regexes.push_back(std::regex(re, std::regex::extended | std::regex::icase));

        for (auto & re : excludeRes)
            excludeRegexes.emplace_back(re, std::regex::extended | std::regex::icase);

        auto state = getEvalState();

        auto jsonOut = json ? std::make_unique<JSONObject>(std::cout) : nullptr;

@ -106,6 +121,14 @@ struct CmdSearch : InstallableCommand, MixJSON
                std::vector<std::smatch> nameMatches;
                bool found = false;

                for (auto & regex : excludeRegexes) {
                    if (
                        std::regex_search(attrPath2, regex)
                        || std::regex_search(name.name, regex)
                        || std::regex_search(description, regex))
                        return;
                }

                for (auto & regex : regexes) {
                    found = false;
                    auto addAll = [&found](std::sregex_iterator it, std::vector<std::smatch> & vec) {

@ -133,15 +156,15 @@ struct CmdSearch : InstallableCommand, MixJSON
                        jsonElem.attr("version", name.version);
                        jsonElem.attr("description", description);
                    } else {
                        auto name2 = hiliteMatches(name.name, std::move(nameMatches), ANSI_GREEN, "\e[0;2m");
                        auto name2 = hiliteMatches(name.name, nameMatches, ANSI_GREEN, "\e[0;2m");
                        if (results > 1) logger->cout("");
                        logger->cout(
                            "* %s%s",
                            wrap("\e[0;1m", hiliteMatches(attrPath2, std::move(attrPathMatches), ANSI_GREEN, "\e[0;1m")),
                            wrap("\e[0;1m", hiliteMatches(attrPath2, attrPathMatches, ANSI_GREEN, "\e[0;1m")),
                            name.version != "" ? " (" + name.version + ")" : "");
                        if (description != "")
                            logger->cout(
                                " %s", hiliteMatches(description, std::move(descriptionMatches), ANSI_GREEN, ANSI_NORMAL));
                                " %s", hiliteMatches(description, descriptionMatches, ANSI_GREEN, ANSI_NORMAL));
                    }
                }
            }
@ -43,12 +43,23 @@ R""(
  # nix search nixpkgs 'firefox|chromium'
  ```

* Search for packages containing `git'`and either `frontend` or `gui`:
* Search for packages containing `git` and either `frontend` or `gui`:

  ```console
  # nix search nixpkgs git 'frontend|gui'
  ```

* Search for packages containing `neovim` but hide ones containing either `gui` or `python`:

  ```console
  # nix search nixpkgs neovim -e 'python|gui'
  ```
  or

  ```console
  # nix search nixpkgs neovim -e 'python' -e 'gui'
  ```

# Description

`nix search` searches *installable* (which must be evaluatable, e.g. a
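Because `CmdSearch` also mixes in `MixJSON` (see the struct definition above), the new exclude flag composes with machine-readable output. An illustrative invocation follows; the attribute names and output shape are approximate, not captured from a real run:

```console
$ nix search --json nixpkgs neovim -e 'python|gui' | jq 'keys'
[
  "legacyPackages.x86_64-linux.neovim"
]
```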
@ -34,7 +34,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand

    std::string description() override
    {
        return "upgrade Nix to the latest stable version";
        return "upgrade Nix to the stable version declared in Nixpkgs";
    }

    std::string doc() override
@ -2,7 +2,7 @@ R""(

# Examples

* Upgrade Nix to the latest stable version:
* Upgrade Nix to the stable version declared in Nixpkgs:

  ```console
  # nix upgrade-nix

@ -16,8 +16,11 @@ R""(

# Description

This command upgrades Nix to the latest version. By default, it
This command upgrades Nix to the stable version declared in Nixpkgs.
locates the directory containing the `nix` binary in the `$PATH`
This stable version is defined in [nix-fallback-paths.nix](https://github.com/NixOS/nixpkgs/raw/master/nixos/modules/installer/tools/nix-fallback-paths.nix)
and updated manually. It may not always be the latest tagged release.

By default, it locates the directory containing the `nix` binary in the `$PATH`
environment variable. If that directory is a Nix profile, it will
upgrade the `nix` package in that profile to the latest stable binary
release.
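Since `CmdUpgradeNix` mixes in `MixDryRun` (see the struct declaration above), a cautious way to check which version the fallback pin currently points at is a dry run. This is only a sketch; the exact wording of the output may differ between versions:

```console
# Report what would be installed without modifying the profile.
$ nix upgrade-nix --dry-run
```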
@ -1 +1 @@
{ ... }@args: import ./shell.nix (args // { contentAddressed = true; })
{ inNixShell ? false, ... }@args: import ./shell.nix (args // { contentAddressed = true; })
@ -75,7 +75,7 @@ rec {
  buildCommand = ''
    mkdir -p $out/bin
    echo ${rootCA} # Just to make it depend on it
    echo "" > $out/bin/${name}
    echo "#! ${shell}" > $out/bin/${name}
    chmod +x $out/bin/${name}
  '';
};
@ -50,6 +50,8 @@ export busybox="@sandbox_shell@"
export version=@PACKAGE_VERSION@
export system=@system@

export BUILD_SHARED_LIBS=@BUILD_SHARED_LIBS@

export IMPURE_VAR1=foo
export IMPURE_VAR2=bar

@ -188,4 +190,15 @@ if [[ -n "${NIX_DAEMON_PACKAGE:-}" ]]; then
    startDaemon
fi

onError() {
    set +x
    echo "$0: test failed at:" >&2
    for ((i = 1; i < 16; i++)); do
        if [[ -z ${BASH_SOURCE[i]} ]]; then break; fi
        echo "  ${FUNCNAME[i]} in ${BASH_SOURCE[i]}:${BASH_LINENO[i-1]}" >&2
    done
}

trap onError ERR

fi # COMMON_SH_SOURCED
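For readers unfamiliar with this bash idiom, here is a minimal standalone sketch of the same technique, outside the test suite (script name and function names are illustrative): an `ERR` trap walks `FUNCNAME`, `BASH_SOURCE` and `BASH_LINENO` to report where the failing command was called.

```bash
#!/usr/bin/env bash
set -E   # propagate the ERR trap into shell functions

onError() {
    echo "demo.sh: failure at:" >&2
    # Frame 0 is the handler itself; frames 1.. are the callers of the failing command.
    for ((i = 1; i < ${#FUNCNAME[@]}; i++)); do
        echo "  ${FUNCNAME[i]} in ${BASH_SOURCE[i]}:${BASH_LINENO[i-1]}" >&2
    done
}
trap onError ERR

step() {
    false   # a failing command; the trap prints "step in demo.sh:<line>", then "main in ..."
    true    # keep the function's own exit status at 0 so the trap fires only once
}

step
```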
@ -58,7 +58,7 @@ EOF
nix eval --file - <<EOF
with (builtins.fromJSON (builtins.readFile ./flake.lock));

# Url inputs whose extension doesn’t match a know archive format should
# Url inputs whose extension doesn’t match a known archive format should
# not be unpacked by default
assert (nodes.no_ext_default_no_unpack.locked.type == "file");
assert (nodes.no_ext_default_no_unpack.locked.unpack or false == false);
@ -1,9 +1,6 @@
|
||||||
source common.sh
|
source common.sh
|
||||||
|
|
||||||
clearStore
|
cp ../simple.nix ../simple.builder.sh ../config.nix $TEST_HOME
|
||||||
rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local
|
|
||||||
|
|
||||||
cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME
|
|
||||||
|
|
||||||
cd $TEST_HOME
|
cd $TEST_HOME
|
||||||
|
|
||||||
|
@ -25,6 +22,7 @@ cat <<EOF > flake.nix
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
nix build .#
|
nix build .#
|
||||||
nix bundle --bundler .# .#
|
nix bundle --bundler .# .#
|
||||||
nix bundle --bundler .#bundlers.$system.default .#packages.$system.default
|
nix bundle --bundler .#bundlers.$system.default .#packages.$system.default
|
||||||
|
@ -32,6 +30,3 @@ nix bundle --bundler .#bundlers.$system.simple .#packages.$system.default
|
||||||
|
|
||||||
nix bundle --bundler .#bundlers.$system.default .#apps.$system.default
|
nix bundle --bundler .#bundlers.$system.default .#apps.$system.default
|
||||||
nix bundle --bundler .#bundlers.$system.simple .#apps.$system.default
|
nix bundle --bundler .#bundlers.$system.simple .#apps.$system.default
|
||||||
|
|
||||||
clearStore
|
|
||||||
|
|
89 tests/flakes/check.sh Normal file
@ -0,0 +1,89 @@
|
||||||
|
source common.sh
|
||||||
|
|
||||||
|
flakeDir=$TEST_ROOT/flake3
|
||||||
|
mkdir -p $flakeDir
|
||||||
|
|
||||||
|
cat > $flakeDir/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
outputs = { self }: {
|
||||||
|
overlay = final: prev: {
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
nix flake check $flakeDir
|
||||||
|
|
||||||
|
cat > $flakeDir/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
outputs = { self }: {
|
||||||
|
overlay = finalll: prev: {
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
(! nix flake check $flakeDir)
|
||||||
|
|
||||||
|
cat > $flakeDir/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
outputs = { self }: {
|
||||||
|
nixosModules.foo = {
|
||||||
|
a.b.c = 123;
|
||||||
|
foo = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
nix flake check $flakeDir
|
||||||
|
|
||||||
|
cat > $flakeDir/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
outputs = { self }: {
|
||||||
|
nixosModules.foo = {
|
||||||
|
a.b.c = 123;
|
||||||
|
foo = assert false; true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
(! nix flake check $flakeDir)
|
||||||
|
|
||||||
|
cat > $flakeDir/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
outputs = { self }: {
|
||||||
|
nixosModule = { config, pkgs, ... }: {
|
||||||
|
a.b.c = 123;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
nix flake check $flakeDir
|
||||||
|
|
||||||
|
cat > $flakeDir/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
outputs = { self }: {
|
||||||
|
nixosModule = { config, pkgs }: {
|
||||||
|
a.b.c = 123;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
(! nix flake check $flakeDir)
|
||||||
|
|
||||||
|
cat > $flakeDir/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
outputs = { self }: {
|
||||||
|
packages.system-1.default = "foo";
|
||||||
|
packages.system-2.default = "bar";
|
||||||
|
};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
checkRes=$(nix flake check --keep-going $flakeDir 2>&1 && fail "nix flake check should have failed" || true)
|
||||||
|
echo "$checkRes" | grep -q "packages.system-1.default"
|
||||||
|
echo "$checkRes" | grep -q "packages.system-2.default"
|
49 tests/flakes/circular.sh Normal file
@ -0,0 +1,49 @@
|
||||||
|
# Test circular flake dependencies.
|
||||||
|
source ./common.sh
|
||||||
|
|
||||||
|
requireGit
|
||||||
|
|
||||||
|
flakeA=$TEST_ROOT/flakeA
|
||||||
|
flakeB=$TEST_ROOT/flakeB
|
||||||
|
|
||||||
|
createGitRepo $flakeA
|
||||||
|
createGitRepo $flakeB
|
||||||
|
|
||||||
|
cat > $flakeA/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
inputs.b.url = git+file://$flakeB;
|
||||||
|
inputs.b.inputs.a.follows = "/";
|
||||||
|
|
||||||
|
outputs = { self, b }: {
|
||||||
|
foo = 123 + b.bar;
|
||||||
|
xyzzy = 1000;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
git -C $flakeA add flake.nix
|
||||||
|
|
||||||
|
cat > $flakeB/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
inputs.a.url = git+file://$flakeA;
|
||||||
|
|
||||||
|
outputs = { self, a }: {
|
||||||
|
bar = 456 + a.xyzzy;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
git -C $flakeB add flake.nix
|
||||||
|
git -C $flakeB commit -a -m 'Foo'
|
||||||
|
|
||||||
|
[[ $(nix eval $flakeA#foo) = 1579 ]]
|
||||||
|
[[ $(nix eval $flakeA#foo) = 1579 ]]
|
||||||
|
|
||||||
|
sed -i $flakeB/flake.nix -e 's/456/789/'
|
||||||
|
git -C $flakeB commit -a -m 'Foo'
|
||||||
|
|
||||||
|
[[ $(nix eval --update-input b $flakeA#foo) = 1912 ]]
|
||||||
|
|
||||||
|
# Test list-inputs with circular dependencies
|
||||||
|
nix flake metadata $flakeA
|
||||||
|
|
73 tests/flakes/common.sh Normal file
@ -0,0 +1,73 @@
|
||||||
|
source ../common.sh
|
||||||
|
|
||||||
|
registry=$TEST_ROOT/registry.json
|
||||||
|
|
||||||
|
requireGit() {
|
||||||
|
if [[ -z $(type -p git) ]]; then
|
||||||
|
echo "Git not installed; skipping flake tests"
|
||||||
|
exit 99
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
writeSimpleFlake() {
|
||||||
|
local flakeDir="$1"
|
||||||
|
cat > $flakeDir/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
description = "Bla bla";
|
||||||
|
|
||||||
|
outputs = inputs: rec {
|
||||||
|
packages.$system = rec {
|
||||||
|
foo = import ./simple.nix;
|
||||||
|
default = foo;
|
||||||
|
};
|
||||||
|
|
||||||
|
# To test "nix flake init".
|
||||||
|
legacyPackages.x86_64-linux.hello = import ./simple.nix;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cp ../simple.nix ../simple.builder.sh ../config.nix $flakeDir/
|
||||||
|
}
|
||||||
|
|
||||||
|
createSimpleGitFlake() {
|
||||||
|
local flakeDir="$1"
|
||||||
|
writeSimpleFlake $flakeDir
|
||||||
|
git -C $flakeDir add flake.nix simple.nix simple.builder.sh config.nix
|
||||||
|
git -C $flakeDir commit -m 'Initial'
|
||||||
|
}
|
||||||
|
|
||||||
|
writeDependentFlake() {
|
||||||
|
local flakeDir="$1"
|
||||||
|
cat > $flakeDir/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
outputs = { self, flake1 }: {
|
||||||
|
packages.$system.default = flake1.packages.$system.default;
|
||||||
|
expr = assert builtins.pathExists ./flake.lock; 123;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
}
|
||||||
|
|
||||||
|
writeTrivialFlake() {
|
||||||
|
local flakeDir="$1"
|
||||||
|
cat > $flakeDir/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
outputs = { self }: {
|
||||||
|
expr = 123;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
}
|
||||||
|
|
||||||
|
createGitRepo() {
|
||||||
|
local repo="$1"
|
||||||
|
local extraArgs="$2"
|
||||||
|
|
||||||
|
rm -rf $repo $repo.tmp
|
||||||
|
mkdir -p $repo
|
||||||
|
|
||||||
|
git -C $repo init $extraArgs
|
||||||
|
git -C $repo config user.email "foobar@example.com"
|
||||||
|
git -C $repo config user.name "Foobar"
|
||||||
|
}
|
|
@ -1,9 +1,6 @@
|
||||||
source common.sh
|
source common.sh
|
||||||
|
|
||||||
clearStore
|
cp ../simple.nix ../simple.builder.sh ../config.nix $TEST_HOME
|
||||||
rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local
|
|
||||||
|
|
||||||
cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME
|
|
||||||
|
|
||||||
cd $TEST_HOME
|
cd $TEST_HOME
|
||||||
|
|
|
@ -1,67 +1,30 @@
|
||||||
source common.sh
|
source ./common.sh
|
||||||
|
|
||||||
if [[ -z $(type -p git) ]]; then
|
requireGit
|
||||||
echo "Git not installed; skipping flake tests"
|
|
||||||
exit 99
|
|
||||||
fi
|
|
||||||
|
|
||||||
clearStore
|
clearStore
|
||||||
rm -rf $TEST_HOME/.cache $TEST_HOME/.config
|
rm -rf $TEST_HOME/.cache $TEST_HOME/.config
|
||||||
|
|
||||||
registry=$TEST_ROOT/registry.json
|
|
||||||
|
|
||||||
flake1Dir=$TEST_ROOT/flake1
|
flake1Dir=$TEST_ROOT/flake1
|
||||||
flake2Dir=$TEST_ROOT/flake2
|
flake2Dir=$TEST_ROOT/flake2
|
||||||
flake3Dir=$TEST_ROOT/flake3
|
flake3Dir=$TEST_ROOT/flake3
|
||||||
flake5Dir=$TEST_ROOT/flake5
|
flake5Dir=$TEST_ROOT/flake5
|
||||||
flake6Dir=$TEST_ROOT/flake6
|
|
||||||
flake7Dir=$TEST_ROOT/flake7
|
flake7Dir=$TEST_ROOT/flake7
|
||||||
templatesDir=$TEST_ROOT/templates
|
|
||||||
nonFlakeDir=$TEST_ROOT/nonFlake
|
nonFlakeDir=$TEST_ROOT/nonFlake
|
||||||
badFlakeDir=$TEST_ROOT/badFlake
|
badFlakeDir=$TEST_ROOT/badFlake
|
||||||
flakeA=$TEST_ROOT/flakeA
|
|
||||||
flakeB=$TEST_ROOT/flakeB
|
|
||||||
flakeGitBare=$TEST_ROOT/flakeGitBare
|
flakeGitBare=$TEST_ROOT/flakeGitBare
|
||||||
flakeFollowsA=$TEST_ROOT/follows/flakeA
|
|
||||||
flakeFollowsB=$TEST_ROOT/follows/flakeA/flakeB
|
|
||||||
flakeFollowsC=$TEST_ROOT/follows/flakeA/flakeB/flakeC
|
|
||||||
flakeFollowsD=$TEST_ROOT/follows/flakeA/flakeD
|
|
||||||
flakeFollowsE=$TEST_ROOT/follows/flakeA/flakeE
|
|
||||||
|
|
||||||
for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $templatesDir $nonFlakeDir $flakeA $flakeB $flakeFollowsA; do
|
for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $nonFlakeDir; do
|
||||||
rm -rf $repo $repo.tmp
|
# Give one repo a non-main initial branch.
|
||||||
mkdir -p $repo
|
|
||||||
|
|
||||||
# Give one repo a non-master initial branch.
|
|
||||||
extraArgs=
|
extraArgs=
|
||||||
if [[ $repo == $flake2Dir ]]; then
|
if [[ $repo == $flake2Dir ]]; then
|
||||||
extraArgs="--initial-branch=main"
|
extraArgs="--initial-branch=main"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
git -C $repo init $extraArgs
|
createGitRepo "$repo" "$extraArgs"
|
||||||
git -C $repo config user.email "foobar@example.com"
|
|
||||||
git -C $repo config user.name "Foobar"
|
|
||||||
done
|
done
|
||||||
|
|
||||||
cat > $flake1Dir/flake.nix <<EOF
|
createSimpleGitFlake $flake1Dir
|
||||||
{
|
|
||||||
description = "Bla bla";
|
|
||||||
|
|
||||||
outputs = inputs: rec {
|
|
||||||
packages.$system = rec {
|
|
||||||
foo = import ./simple.nix;
|
|
||||||
default = foo;
|
|
||||||
};
|
|
||||||
|
|
||||||
# To test "nix flake init".
|
|
||||||
legacyPackages.x86_64-linux.hello = import ./simple.nix;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
cp ./simple.nix ./simple.builder.sh ./config.nix $flake1Dir/
|
|
||||||
git -C $flake1Dir add flake.nix simple.nix simple.builder.sh config.nix
|
|
||||||
git -C $flake1Dir commit -m 'Initial'
|
|
||||||
|
|
||||||
cat > $flake2Dir/flake.nix <<EOF
|
cat > $flake2Dir/flake.nix <<EOF
|
||||||
{
|
{
|
||||||
|
@ -105,12 +68,10 @@ nix registry add --registry $registry flake1 git+file://$flake1Dir
|
||||||
nix registry add --registry $registry flake2 git+file://$flake2Dir
|
nix registry add --registry $registry flake2 git+file://$flake2Dir
|
||||||
nix registry add --registry $registry flake3 git+file://$flake3Dir
|
nix registry add --registry $registry flake3 git+file://$flake3Dir
|
||||||
nix registry add --registry $registry flake4 flake3
|
nix registry add --registry $registry flake4 flake3
|
||||||
nix registry add --registry $registry flake5 hg+file://$flake5Dir
|
|
||||||
nix registry add --registry $registry nixpkgs flake1
|
nix registry add --registry $registry nixpkgs flake1
|
||||||
nix registry add --registry $registry templates git+file://$templatesDir
|
|
||||||
|
|
||||||
# Test 'nix flake list'.
|
# Test 'nix flake list'.
|
||||||
[[ $(nix registry list | wc -l) == 7 ]]
|
[[ $(nix registry list | wc -l) == 5 ]]
|
||||||
|
|
||||||
# Test 'nix flake metadata'.
|
# Test 'nix flake metadata'.
|
||||||
nix flake metadata flake1
|
nix flake metadata flake1
|
||||||
|
@ -173,11 +134,11 @@ nix build -o $TEST_ROOT/result $flake2Dir#bar --no-write-lock-file
|
||||||
nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes'
|
nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes'
|
||||||
nix build -o $TEST_ROOT/result $flake2Dir#bar --commit-lock-file
|
nix build -o $TEST_ROOT/result $flake2Dir#bar --commit-lock-file
|
||||||
[[ -e $flake2Dir/flake.lock ]]
|
[[ -e $flake2Dir/flake.lock ]]
|
||||||
[[ -z $(git -C $flake2Dir diff master) ]]
|
[[ -z $(git -C $flake2Dir diff main || echo failed) ]]
|
||||||
|
|
||||||
# Rerunning the build should not change the lockfile.
|
# Rerunning the build should not change the lockfile.
|
||||||
nix build -o $TEST_ROOT/result $flake2Dir#bar
|
nix build -o $TEST_ROOT/result $flake2Dir#bar
|
||||||
[[ -z $(git -C $flake2Dir diff master) ]]
|
[[ -z $(git -C $flake2Dir diff main || echo failed) ]]
|
||||||
|
|
||||||
# Building with a lockfile should not require a fetch of the registry.
|
# Building with a lockfile should not require a fetch of the registry.
|
||||||
nix build -o $TEST_ROOT/result --flake-registry file:///no-registry.json $flake2Dir#bar --refresh
|
nix build -o $TEST_ROOT/result --flake-registry file:///no-registry.json $flake2Dir#bar --refresh
|
||||||
|
@ -186,7 +147,7 @@ nix build -o $TEST_ROOT/result --no-use-registries $flake2Dir#bar --refresh
|
||||||
|
|
||||||
# Updating the flake should not change the lockfile.
|
# Updating the flake should not change the lockfile.
|
||||||
nix flake lock $flake2Dir
|
nix flake lock $flake2Dir
|
||||||
[[ -z $(git -C $flake2Dir diff master) ]]
|
[[ -z $(git -C $flake2Dir diff main || echo failed) ]]
|
||||||
|
|
||||||
# Now we should be able to build the flake in pure mode.
|
# Now we should be able to build the flake in pure mode.
|
||||||
nix build -o $TEST_ROOT/result flake2#bar
|
nix build -o $TEST_ROOT/result flake2#bar
|
||||||
|
@ -221,7 +182,7 @@ nix build -o $TEST_ROOT/result $flake3Dir#"sth sth"
|
||||||
nix build -o $TEST_ROOT/result $flake3Dir#"sth%20sth"
|
nix build -o $TEST_ROOT/result $flake3Dir#"sth%20sth"
|
||||||
|
|
||||||
# Check whether it saved the lockfile
|
# Check whether it saved the lockfile
|
||||||
(! [[ -z $(git -C $flake3Dir diff master) ]])
|
[[ -n $(git -C $flake3Dir diff master) ]]
|
||||||
|
|
||||||
git -C $flake3Dir add flake.lock
|
git -C $flake3Dir add flake.lock
|
||||||
|
|
||||||
|
@ -291,7 +252,7 @@ cat > $flake3Dir/flake.nix <<EOF
|
||||||
}
|
}
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
cp ./config.nix $flake3Dir
|
cp ../config.nix $flake3Dir
|
||||||
|
|
||||||
git -C $flake3Dir add flake.nix config.nix
|
git -C $flake3Dir add flake.nix config.nix
|
||||||
git -C $flake3Dir commit -m 'Add nonFlakeInputs'
|
git -C $flake3Dir commit -m 'Add nonFlakeInputs'
|
||||||
|
@ -321,10 +282,10 @@ nix build -o $TEST_ROOT/result flake4#xyzzy
|
||||||
|
|
||||||
# Test 'nix flake update' and --override-flake.
|
# Test 'nix flake update' and --override-flake.
|
||||||
nix flake lock $flake3Dir
|
nix flake lock $flake3Dir
|
||||||
[[ -z $(git -C $flake3Dir diff master) ]]
|
[[ -z $(git -C $flake3Dir diff master || echo failed) ]]
|
||||||
|
|
||||||
nix flake update $flake3Dir --override-flake flake2 nixpkgs
|
nix flake update $flake3Dir --override-flake flake2 nixpkgs
|
||||||
[[ ! -z $(git -C $flake3Dir diff master) ]]
|
[[ ! -z $(git -C $flake3Dir diff master || echo failed) ]]
|
||||||
|
|
||||||
# Make branch "removeXyzzy" where flake3 doesn't have xyzzy anymore
|
# Make branch "removeXyzzy" where flake3 doesn't have xyzzy anymore
|
||||||
git -C $flake3Dir checkout -b removeXyzzy
|
git -C $flake3Dir checkout -b removeXyzzy
|
||||||
|
@ -366,161 +327,19 @@ nix build -o $TEST_ROOT/result flake4/removeXyzzy#sth
|
||||||
|
|
||||||
# Testing the nix CLI
|
# Testing the nix CLI
|
||||||
nix registry add flake1 flake3
|
nix registry add flake1 flake3
|
||||||
[[ $(nix registry list | wc -l) == 8 ]]
|
[[ $(nix registry list | wc -l) == 6 ]]
|
||||||
nix registry pin flake1
|
nix registry pin flake1
|
||||||
[[ $(nix registry list | wc -l) == 8 ]]
|
[[ $(nix registry list | wc -l) == 6 ]]
|
||||||
nix registry pin flake1 flake3
|
nix registry pin flake1 flake3
|
||||||
[[ $(nix registry list | wc -l) == 8 ]]
|
[[ $(nix registry list | wc -l) == 6 ]]
|
||||||
nix registry remove flake1
|
nix registry remove flake1
|
||||||
[[ $(nix registry list | wc -l) == 7 ]]
|
[[ $(nix registry list | wc -l) == 5 ]]
|
||||||
|
|
||||||
# Test 'nix flake init'.
|
|
||||||
cat > $templatesDir/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
description = "Some templates";
|
|
||||||
|
|
||||||
outputs = { self }: {
|
|
||||||
templates = rec {
|
|
||||||
trivial = {
|
|
||||||
path = ./trivial;
|
|
||||||
description = "A trivial flake";
|
|
||||||
welcomeText = ''
|
|
||||||
Welcome to my trivial flake
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
default = trivial;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
mkdir $templatesDir/trivial
|
|
||||||
|
|
||||||
cat > $templatesDir/trivial/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
description = "A flake for building Hello World";
|
|
||||||
|
|
||||||
outputs = { self, nixpkgs }: {
|
|
||||||
packages.x86_64-linux = rec {
|
|
||||||
hello = nixpkgs.legacyPackages.x86_64-linux.hello;
|
|
||||||
default = hello;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
git -C $templatesDir add flake.nix trivial/flake.nix
|
|
||||||
git -C $templatesDir commit -m 'Initial'
|
|
||||||
|
|
||||||
nix flake check templates
|
|
||||||
nix flake show templates
|
|
||||||
nix flake show templates --json | jq
|
|
||||||
|
|
||||||
(cd $flake7Dir && nix flake init)
|
|
||||||
(cd $flake7Dir && nix flake init) # check idempotence
|
|
||||||
git -C $flake7Dir add flake.nix
|
|
||||||
nix flake check $flake7Dir
|
|
||||||
nix flake show $flake7Dir
|
|
||||||
nix flake show $flake7Dir --json | jq
|
|
||||||
git -C $flake7Dir commit -a -m 'Initial'
|
|
||||||
|
|
||||||
# Test 'nix flake new'.
|
|
||||||
rm -rf $flake6Dir
|
|
||||||
nix flake new -t templates#trivial $flake6Dir
|
|
||||||
nix flake new -t templates#trivial $flake6Dir # check idempotence
|
|
||||||
nix flake check $flake6Dir
|
|
||||||
|
|
||||||
# Test 'nix flake clone'.
|
# Test 'nix flake clone'.
|
||||||
rm -rf $TEST_ROOT/flake1-v2
|
rm -rf $TEST_ROOT/flake1-v2
|
||||||
nix flake clone flake1 --dest $TEST_ROOT/flake1-v2
|
nix flake clone flake1 --dest $TEST_ROOT/flake1-v2
|
||||||
[ -e $TEST_ROOT/flake1-v2/flake.nix ]
|
[ -e $TEST_ROOT/flake1-v2/flake.nix ]
|
||||||
|
|
||||||
# More 'nix flake check' tests.
|
|
||||||
cat > $flake3Dir/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
outputs = { flake1, self }: {
|
|
||||||
overlay = final: prev: {
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
nix flake check $flake3Dir
|
|
||||||
|
|
||||||
cat > $flake3Dir/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
outputs = { flake1, self }: {
|
|
||||||
overlay = finalll: prev: {
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
(! nix flake check $flake3Dir)
|
|
||||||
|
|
||||||
cat > $flake3Dir/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
outputs = { flake1, self }: {
|
|
||||||
nixosModules.foo = {
|
|
||||||
a.b.c = 123;
|
|
||||||
foo = true;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
nix flake check $flake3Dir
|
|
||||||
|
|
||||||
cat > $flake3Dir/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
outputs = { flake1, self }: {
|
|
||||||
nixosModules.foo = {
|
|
||||||
a.b.c = 123;
|
|
||||||
foo = assert false; true;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
(! nix flake check $flake3Dir)
|
|
||||||
|
|
||||||
cat > $flake3Dir/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
outputs = { flake1, self }: {
|
|
||||||
nixosModule = { config, pkgs, ... }: {
|
|
||||||
a.b.c = 123;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
nix flake check $flake3Dir
|
|
||||||
|
|
||||||
cat > $flake3Dir/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
outputs = { flake1, self }: {
|
|
||||||
nixosModule = { config, pkgs }: {
|
|
||||||
a.b.c = 123;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
(! nix flake check $flake3Dir)
|
|
||||||
|
|
||||||
cat > $flake3Dir/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
outputs = { flake1, self }: {
|
|
||||||
packages.system-1.default = "foo";
|
|
||||||
packages.system-2.default = "bar";
|
|
||||||
};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
checkRes=$(nix flake check --keep-going $flake3Dir 2>&1 && fail "nix flake check should have failed" || true)
|
|
||||||
echo "$checkRes" | grep -q "packages.system-1.default"
|
|
||||||
echo "$checkRes" | grep -q "packages.system-2.default"
|
|
||||||
|
|
||||||
# Test 'follows' inputs.
|
# Test 'follows' inputs.
|
||||||
cat > $flake3Dir/flake.nix <<EOF
|
cat > $flake3Dir/flake.nix <<EOF
|
||||||
{
|
{
|
||||||
|
@ -563,6 +382,10 @@ nix flake lock $flake3Dir
|
||||||
[[ $(jq -c .nodes.root.inputs.bar $flake3Dir/flake.lock) = '["flake2"]' ]]
|
[[ $(jq -c .nodes.root.inputs.bar $flake3Dir/flake.lock) = '["flake2"]' ]]
|
||||||
|
|
||||||
# Test overriding inputs of inputs.
|
# Test overriding inputs of inputs.
|
||||||
|
writeTrivialFlake $flake7Dir
|
||||||
|
git -C $flake7Dir add flake.nix
|
||||||
|
git -C $flake7Dir commit -m 'Initial'
|
||||||
|
|
||||||
cat > $flake3Dir/flake.nix <<EOF
|
cat > $flake3Dir/flake.nix <<EOF
|
||||||
{
|
{
|
||||||
inputs.flake2.inputs.flake1 = {
|
inputs.flake2.inputs.flake1 = {
|
||||||
|
@ -597,50 +420,9 @@ rm -rf $flakeGitBare
|
||||||
git clone --bare $flake1Dir $flakeGitBare
|
git clone --bare $flake1Dir $flakeGitBare
|
||||||
nix build -o $TEST_ROOT/result git+file://$flakeGitBare
|
nix build -o $TEST_ROOT/result git+file://$flakeGitBare
|
||||||
|
|
||||||
# Test Mercurial flakes.
|
|
||||||
rm -rf $flake5Dir
|
|
||||||
mkdir $flake5Dir
|
|
||||||
|
|
||||||
cat > $flake5Dir/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
outputs = { self, flake1 }: {
|
|
||||||
packages.$system.default = flake1.packages.$system.default;
|
|
||||||
expr = assert builtins.pathExists ./flake.lock; 123;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
if [[ -n $(type -p hg) ]]; then
|
|
||||||
hg init $flake5Dir
|
|
||||||
|
|
||||||
hg add $flake5Dir/flake.nix
|
|
||||||
hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Initial commit'
|
|
||||||
|
|
||||||
nix build -o $TEST_ROOT/result hg+file://$flake5Dir
|
|
||||||
[[ -e $TEST_ROOT/result/hello ]]
|
|
||||||
|
|
||||||
(! nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revision)
|
|
||||||
|
|
||||||
nix eval hg+file://$flake5Dir#expr
|
|
||||||
|
|
||||||
nix eval hg+file://$flake5Dir#expr
|
|
||||||
|
|
||||||
(! nix eval hg+file://$flake5Dir#expr --no-allow-dirty)
|
|
||||||
|
|
||||||
(! nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revision)
|
|
||||||
|
|
||||||
hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Add lock file'
|
|
||||||
|
|
||||||
nix flake metadata --json hg+file://$flake5Dir --refresh | jq -e -r .revision
|
|
||||||
nix flake metadata --json hg+file://$flake5Dir
|
|
||||||
[[ $(nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revCount) = 1 ]]
|
|
||||||
|
|
||||||
nix build -o $TEST_ROOT/result hg+file://$flake5Dir --no-registries --no-allow-dirty
|
|
||||||
nix build -o $TEST_ROOT/result hg+file://$flake5Dir --no-use-registries --no-allow-dirty
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Test path flakes.
|
# Test path flakes.
|
||||||
rm -rf $flake5Dir/.hg $flake5Dir/flake.lock
|
mkdir -p $flake5Dir
|
||||||
|
writeDependentFlake $flake5Dir
|
||||||
nix flake lock path://$flake5Dir
|
nix flake lock path://$flake5Dir
|
||||||
|
|
||||||
# Test tarball flakes.
|
# Test tarball flakes.
|
||||||
|
@ -678,165 +460,6 @@ nix flake lock $flake3Dir --update-input flake2/flake1
|
||||||
# Test 'nix flake metadata --json'.
|
# Test 'nix flake metadata --json'.
|
||||||
nix flake metadata $flake3Dir --json | jq .
|
nix flake metadata $flake3Dir --json | jq .
|
||||||
|
|
||||||
# Test circular flake dependencies.
|
|
||||||
cat > $flakeA/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
inputs.b.url = git+file://$flakeB;
|
|
||||||
inputs.b.inputs.a.follows = "/";
|
|
||||||
|
|
||||||
outputs = { self, nixpkgs, b }: {
|
|
||||||
foo = 123 + b.bar;
|
|
||||||
xyzzy = 1000;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
git -C $flakeA add flake.nix
|
|
||||||
|
|
||||||
cat > $flakeB/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
inputs.a.url = git+file://$flakeA;
|
|
||||||
|
|
||||||
outputs = { self, nixpkgs, a }: {
|
|
||||||
bar = 456 + a.xyzzy;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
git -C $flakeB add flake.nix
|
|
||||||
git -C $flakeB commit -a -m 'Foo'
|
|
||||||
|
|
||||||
[[ $(nix eval $flakeA#foo) = 1579 ]]
|
|
||||||
[[ $(nix eval $flakeA#foo) = 1579 ]]
|
|
||||||
|
|
||||||
sed -i $flakeB/flake.nix -e 's/456/789/'
|
|
||||||
git -C $flakeB commit -a -m 'Foo'
|
|
||||||
|
|
||||||
[[ $(nix eval --update-input b $flakeA#foo) = 1912 ]]
|
|
||||||
|
|
||||||
# Test list-inputs with circular dependencies
|
|
||||||
nix flake metadata $flakeA
|
|
||||||
|
|
||||||
# Test flake follow paths
|
|
||||||
mkdir -p $flakeFollowsB
|
|
||||||
mkdir -p $flakeFollowsC
|
|
||||||
mkdir -p $flakeFollowsD
|
|
||||||
mkdir -p $flakeFollowsE
|
|
||||||
|
|
||||||
cat > $flakeFollowsA/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
description = "Flake A";
|
|
||||||
inputs = {
|
|
||||||
B = {
|
|
||||||
url = "path:./flakeB";
|
|
||||||
inputs.foobar.follows = "foobar";
|
|
||||||
};
|
|
||||||
|
|
||||||
foobar.url = "path:$flakeFollowsA/flakeE";
|
|
||||||
};
|
|
||||||
outputs = { ... }: {};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
cat > $flakeFollowsB/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
description = "Flake B";
|
|
||||||
inputs = {
|
|
||||||
foobar.url = "path:$flakeFollowsA/flakeE";
|
|
||||||
goodoo.follows = "C/goodoo";
|
|
||||||
C = {
|
|
||||||
url = "path:./flakeC";
|
|
||||||
inputs.foobar.follows = "foobar";
|
|
||||||
};
|
|
||||||
};
|
|
||||||
outputs = { ... }: {};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
cat > $flakeFollowsC/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
description = "Flake C";
|
|
||||||
inputs = {
|
|
||||||
foobar.url = "path:$flakeFollowsA/flakeE";
|
|
||||||
goodoo.follows = "foobar";
|
|
||||||
};
|
|
||||||
outputs = { ... }: {};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
cat > $flakeFollowsD/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
description = "Flake D";
|
|
||||||
inputs = {};
|
|
||||||
outputs = { ... }: {};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
cat > $flakeFollowsE/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
description = "Flake E";
|
|
||||||
inputs = {};
|
|
||||||
outputs = { ... }: {};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
git -C $flakeFollowsA add flake.nix flakeB/flake.nix \
|
|
||||||
flakeB/flakeC/flake.nix flakeD/flake.nix flakeE/flake.nix
|
|
||||||
|
|
||||||
nix flake metadata $flakeFollowsA
|
|
||||||
|
|
||||||
nix flake update $flakeFollowsA
|
|
||||||
|
|
||||||
oldLock="$(cat "$flakeFollowsA/flake.lock")"
|
|
||||||
|
|
||||||
# Ensure that locking twice doesn't change anything
|
|
||||||
|
|
||||||
nix flake lock $flakeFollowsA
|
|
||||||
|
|
||||||
newLock="$(cat "$flakeFollowsA/flake.lock")"
|
|
||||||
|
|
||||||
diff <(echo "$newLock") <(echo "$oldLock")
|
|
||||||
|
|
||||||
[[ $(jq -c .nodes.B.inputs.C $flakeFollowsA/flake.lock) = '"C"' ]]
|
|
||||||
[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '["foobar"]' ]]
|
|
||||||
[[ $(jq -c .nodes.C.inputs.foobar $flakeFollowsA/flake.lock) = '["B","foobar"]' ]]
|
|
||||||
|
|
||||||
# Ensure removing follows from flake.nix removes them from the lockfile
|
|
||||||
|
|
||||||
cat > $flakeFollowsA/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
description = "Flake A";
|
|
||||||
inputs = {
|
|
||||||
B = {
|
|
||||||
url = "path:./flakeB";
|
|
||||||
inputs.nonFlake.follows = "D";
|
|
||||||
};
|
|
||||||
D.url = "path:./flakeD";
|
|
||||||
};
|
|
||||||
outputs = { ... }: {};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
nix flake lock $flakeFollowsA
|
|
||||||
|
|
||||||
[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '"foobar"' ]]
|
|
||||||
jq -r -c '.nodes | keys | .[]' $flakeFollowsA/flake.lock | grep "^foobar$"
|
|
||||||
|
|
||||||
# Ensure a relative path is not allowed to go outside the store path
|
|
||||||
cat > $flakeFollowsA/flake.nix <<EOF
|
|
||||||
{
|
|
||||||
description = "Flake A";
|
|
||||||
inputs = {
|
|
||||||
B.url = "path:../flakeB";
|
|
||||||
};
|
|
||||||
outputs = { ... }: {};
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
git -C $flakeFollowsA add flake.nix
|
|
||||||
|
|
||||||
nix flake lock $flakeFollowsA 2>&1 | grep 'points outside'
|
|
||||||
|
|
||||||
# Test flake in store does not evaluate
|
# Test flake in store does not evaluate
|
||||||
rm -rf $badFlakeDir
|
rm -rf $badFlakeDir
|
||||||
mkdir $badFlakeDir
|
mkdir $badFlakeDir
|
150 tests/flakes/follow-paths.sh Normal file
@ -0,0 +1,150 @@
|
||||||
|
source ./common.sh
|
||||||
|
|
||||||
|
requireGit
|
||||||
|
|
||||||
|
flakeFollowsA=$TEST_ROOT/follows/flakeA
|
||||||
|
flakeFollowsB=$TEST_ROOT/follows/flakeA/flakeB
|
||||||
|
flakeFollowsC=$TEST_ROOT/follows/flakeA/flakeB/flakeC
|
||||||
|
flakeFollowsD=$TEST_ROOT/follows/flakeA/flakeD
|
||||||
|
flakeFollowsE=$TEST_ROOT/follows/flakeA/flakeE
|
||||||
|
|
||||||
|
# Test following path flakerefs.
|
||||||
|
createGitRepo $flakeFollowsA
|
||||||
|
mkdir -p $flakeFollowsB
|
||||||
|
mkdir -p $flakeFollowsC
|
||||||
|
mkdir -p $flakeFollowsD
|
||||||
|
mkdir -p $flakeFollowsE
|
||||||
|
|
||||||
|
cat > $flakeFollowsA/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
description = "Flake A";
|
||||||
|
inputs = {
|
||||||
|
B = {
|
||||||
|
url = "path:./flakeB";
|
||||||
|
inputs.foobar.follows = "foobar";
|
||||||
|
};
|
||||||
|
|
||||||
|
foobar.url = "path:$flakeFollowsA/flakeE";
|
||||||
|
};
|
||||||
|
outputs = { ... }: {};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cat > $flakeFollowsB/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
description = "Flake B";
|
||||||
|
inputs = {
|
||||||
|
foobar.url = "path:$flakeFollowsA/flakeE";
|
||||||
|
goodoo.follows = "C/goodoo";
|
||||||
|
C = {
|
||||||
|
url = "path:./flakeC";
|
||||||
|
inputs.foobar.follows = "foobar";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
outputs = { ... }: {};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cat > $flakeFollowsC/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
description = "Flake C";
|
||||||
|
inputs = {
|
||||||
|
foobar.url = "path:$flakeFollowsA/flakeE";
|
||||||
|
goodoo.follows = "foobar";
|
||||||
|
};
|
||||||
|
outputs = { ... }: {};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cat > $flakeFollowsD/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
description = "Flake D";
|
||||||
|
inputs = {};
|
||||||
|
outputs = { ... }: {};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cat > $flakeFollowsE/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
description = "Flake E";
|
||||||
|
inputs = {};
|
||||||
|
outputs = { ... }: {};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
git -C $flakeFollowsA add flake.nix flakeB/flake.nix \
|
||||||
|
flakeB/flakeC/flake.nix flakeD/flake.nix flakeE/flake.nix
|
||||||
|
|
||||||
|
nix flake metadata $flakeFollowsA
|
||||||
|
|
||||||
|
nix flake update $flakeFollowsA
|
||||||
|
|
||||||
|
nix flake lock $flakeFollowsA
|
||||||
|
|
||||||
|
oldLock="$(cat "$flakeFollowsA/flake.lock")"
|
||||||
|
|
||||||
|
# Ensure that locking twice doesn't change anything
|
||||||
|
|
||||||
|
nix flake lock $flakeFollowsA
|
||||||
|
|
||||||
|
newLock="$(cat "$flakeFollowsA/flake.lock")"
|
||||||
|
|
||||||
|
diff <(echo "$newLock") <(echo "$oldLock")
|
||||||
|
|
||||||
|
[[ $(jq -c .nodes.B.inputs.C $flakeFollowsA/flake.lock) = '"C"' ]]
|
||||||
|
[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '["foobar"]' ]]
|
||||||
|
[[ $(jq -c .nodes.C.inputs.foobar $flakeFollowsA/flake.lock) = '["B","foobar"]' ]]
|
||||||
|
|
||||||
|
# Ensure removing follows from flake.nix removes them from the lockfile
|
||||||
|
|
||||||
|
cat > $flakeFollowsA/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
description = "Flake A";
|
||||||
|
inputs = {
|
||||||
|
B = {
|
||||||
|
url = "path:./flakeB";
|
||||||
|
};
|
||||||
|
D.url = "path:./flakeD";
|
||||||
|
};
|
||||||
|
outputs = { ... }: {};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
nix flake lock $flakeFollowsA
|
||||||
|
|
||||||
|
[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '"foobar"' ]]
|
||||||
|
jq -r -c '.nodes | keys | .[]' $flakeFollowsA/flake.lock | grep "^foobar$"
|
||||||
|
|
||||||
|
# Ensure a relative path is not allowed to go outside the store path
|
||||||
|
cat > $flakeFollowsA/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
description = "Flake A";
|
||||||
|
inputs = {
|
||||||
|
B.url = "path:../flakeB";
|
||||||
|
};
|
||||||
|
outputs = { ... }: {};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
git -C $flakeFollowsA add flake.nix
|
||||||
|
|
||||||
|
nix flake lock $flakeFollowsA 2>&1 | grep 'points outside'
|
||||||
|
|
||||||
|
# Non-existant follows should print a warning.
|
||||||
|
cat >$flakeFollowsA/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
description = "Flake A";
|
||||||
|
inputs.B = {
|
||||||
|
url = "path:./flakeB";
|
||||||
|
inputs.invalid.follows = "D";
|
||||||
|
inputs.invalid2.url = "path:./flakeD";
|
||||||
|
};
|
||||||
|
inputs.D.url = "path:./flakeD";
|
||||||
|
outputs = { ... }: {};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
git -C $flakeFollowsA add flake.nix
|
||||||
|
|
||||||
|
nix flake lock $flakeFollowsA 2>&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid'"
|
||||||
|
nix flake lock $flakeFollowsA 2>&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid2'"
|
87 tests/flakes/init.sh Normal file
@ -0,0 +1,87 @@
|
||||||
|
source ./common.sh
|
||||||
|
|
||||||
|
requireGit
|
||||||
|
|
||||||
|
templatesDir=$TEST_ROOT/templates
|
||||||
|
flakeDir=$TEST_ROOT/flake
|
||||||
|
nixpkgsDir=$TEST_ROOT/nixpkgs
|
||||||
|
|
||||||
|
nix registry add --registry $registry templates git+file://$templatesDir
|
||||||
|
nix registry add --registry $registry nixpkgs git+file://$nixpkgsDir
|
||||||
|
|
||||||
|
createGitRepo $nixpkgsDir
|
||||||
|
createSimpleGitFlake $nixpkgsDir
|
||||||
|
|
||||||
|
# Test 'nix flake init'.
|
||||||
|
createGitRepo $templatesDir
|
||||||
|
|
||||||
|
cat > $templatesDir/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
description = "Some templates";
|
||||||
|
|
||||||
|
outputs = { self }: {
|
||||||
|
templates = rec {
|
||||||
|
trivial = {
|
||||||
|
path = ./trivial;
|
||||||
|
description = "A trivial flake";
|
||||||
|
welcomeText = ''
|
||||||
|
Welcome to my trivial flake
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
default = trivial;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
mkdir $templatesDir/trivial
|
||||||
|
|
||||||
|
cat > $templatesDir/trivial/flake.nix <<EOF
|
||||||
|
{
|
||||||
|
description = "A flake for building Hello World";
|
||||||
|
|
||||||
|
outputs = { self, nixpkgs }: {
|
||||||
|
packages.x86_64-linux = rec {
|
||||||
|
hello = nixpkgs.legacyPackages.x86_64-linux.hello;
|
||||||
|
default = hello;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
echo a > $templatesDir/trivial/a
|
||||||
|
echo b > $templatesDir/trivial/b
|
||||||
|
|
||||||
|
git -C $templatesDir add flake.nix trivial/
|
||||||
|
git -C $templatesDir commit -m 'Initial'
|
||||||
|
|
||||||
|
nix flake check templates
|
||||||
|
nix flake show templates
|
||||||
|
nix flake show templates --json | jq
|
||||||
|
|
||||||
|
createGitRepo $flakeDir
|
||||||
|
(cd $flakeDir && nix flake init)
|
||||||
|
(cd $flakeDir && nix flake init) # check idempotence
|
||||||
|
git -C $flakeDir add flake.nix
|
||||||
|
nix flake check $flakeDir
|
||||||
|
nix flake show $flakeDir
|
||||||
|
nix flake show $flakeDir --json | jq
|
||||||
|
git -C $flakeDir commit -a -m 'Initial'
|
||||||
|
|
||||||
|
# Test 'nix flake init' with benign conflicts
|
||||||
|
createGitRepo "$flakeDir"
|
||||||
|
echo a > $flakeDir/a
|
||||||
|
(cd $flakeDir && nix flake init) # check idempotence
|
||||||
|
|
||||||
|
# Test 'nix flake init' with conflicts
|
||||||
|
createGitRepo "$flakeDir"
|
||||||
|
echo b > $flakeDir/a
|
||||||
|
pushd $flakeDir
|
||||||
|
(! nix flake init) |& grep "refusing to overwrite existing file '$flakeDir/a'"
|
||||||
|
popd
|
||||||
|
git -C $flakeDir commit -a -m 'Changed'
|
||||||
|
|
||||||
|
# Test 'nix flake new'.
|
||||||
|
rm -rf $flakeDir
|
||||||
|
nix flake new -t templates#trivial $flakeDir
|
||||||
|
nix flake new -t templates#trivial $flakeDir # check idempotence
|
||||||
|
nix flake check $flakeDir
|
46 tests/flakes/mercurial.sh Normal file
@ -0,0 +1,46 @@
|
||||||
|
source ./common.sh
|
||||||
|
|
||||||
|
if [[ -z $(type -p hg) ]]; then
|
||||||
|
echo "Mercurial not installed; skipping"
|
||||||
|
exit 99
|
||||||
|
fi
|
||||||
|
|
||||||
|
flake1Dir=$TEST_ROOT/flake-hg1
|
||||||
|
mkdir -p $flake1Dir
|
||||||
|
writeSimpleFlake $flake1Dir
|
||||||
|
hg init $flake1Dir
|
||||||
|
|
||||||
|
nix registry add --registry $registry flake1 hg+file://$flake1Dir
|
||||||
|
|
||||||
|
flake2Dir=$TEST_ROOT/flake-hg2
|
||||||
|
mkdir -p $flake2Dir
|
||||||
|
writeDependentFlake $flake2Dir
|
||||||
|
hg init $flake2Dir
|
||||||
|
|
||||||
|
hg add $flake1Dir/*
|
||||||
|
hg commit --config ui.username=foobar@example.org $flake1Dir -m 'Initial commit'
|
||||||
|
|
||||||
|
hg add $flake2Dir/flake.nix
|
||||||
|
hg commit --config ui.username=foobar@example.org $flake2Dir -m 'Initial commit'
|
||||||
|
|
||||||
|
nix build -o $TEST_ROOT/result hg+file://$flake2Dir
|
||||||
|
[[ -e $TEST_ROOT/result/hello ]]
|
||||||
|
|
||||||
|
(! nix flake metadata --json hg+file://$flake2Dir | jq -e -r .revision)
|
||||||
|
|
||||||
|
nix eval hg+file://$flake2Dir#expr
|
||||||
|
|
||||||
|
nix eval hg+file://$flake2Dir#expr
|
||||||
|
|
||||||
|
(! nix eval hg+file://$flake2Dir#expr --no-allow-dirty)
|
||||||
|
|
||||||
|
(! nix flake metadata --json hg+file://$flake2Dir | jq -e -r .revision)
|
||||||
|
|
||||||
|
hg commit --config ui.username=foobar@example.org $flake2Dir -m 'Add lock file'
|
||||||
|
|
||||||
|
nix flake metadata --json hg+file://$flake2Dir --refresh | jq -e -r .revision
|
||||||
|
nix flake metadata --json hg+file://$flake2Dir
|
||||||
|
[[ $(nix flake metadata --json hg+file://$flake2Dir | jq -e -r .revCount) = 1 ]]
|
||||||
|
|
||||||
|
nix build -o $TEST_ROOT/result hg+file://$flake2Dir --no-registries --no-allow-dirty
|
||||||
|
nix build -o $TEST_ROOT/result hg+file://$flake2Dir --no-use-registries --no-allow-dirty
|
|
@ -1,8 +1,8 @@
|
||||||
source common.sh
|
source ../common.sh
|
||||||
|
|
||||||
clearStore
|
clearStore
|
||||||
rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local
|
rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local
|
||||||
cp ./shell-hello.nix ./config.nix $TEST_HOME
|
cp ../shell-hello.nix ../config.nix $TEST_HOME
|
||||||
cd $TEST_HOME
|
cd $TEST_HOME
|
||||||
|
|
||||||
cat <<EOF > flake.nix
|
cat <<EOF > flake.nix
|
|
@ -1,15 +1,11 @@
|
||||||
source common.sh
|
source common.sh
|
||||||
|
|
||||||
if [[ -z $(type -p git) ]]; then
|
|
||||||
echo "Git not installed; skipping flake search tests"
|
|
||||||
exit 99
|
|
||||||
fi
|
|
||||||
|
|
||||||
clearStore
|
clearStore
|
||||||
|
|
||||||
cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME
|
writeSimpleFlake $TEST_HOME
|
||||||
cd $TEST_HOME
|
cd $TEST_HOME
|
||||||
mkdir -p foo/subdir
|
mkdir -p foo/subdir
|
||||||
|
|
||||||
echo '{ outputs = _: {}; }' > foo/flake.nix
|
echo '{ outputs = _: {}; }' > foo/flake.nix
|
||||||
cat <<EOF > flake.nix
|
cat <<EOF > flake.nix
|
||||||
{
|
{
|
||||||
|
@ -43,10 +39,12 @@ nix build --override-input foo . || fail "flake should search up directories whe
|
||||||
sed "s,$PWD/foo,$PWD/foo/subdir,g" -i flake.nix
|
sed "s,$PWD/foo,$PWD/foo/subdir,g" -i flake.nix
|
||||||
! nix build || fail "flake should not search upwards when part of inputs"
|
! nix build || fail "flake should not search upwards when part of inputs"
|
||||||
|
|
||||||
pushd subdir
|
if [[ -n $(type -p git) ]]; then
|
||||||
git init
|
pushd subdir
|
||||||
for i in "${success[@]}" "${failure[@]}"; do
|
git init
|
||||||
|
for i in "${success[@]}" "${failure[@]}"; do
|
||||||
! nix build $i || fail "flake should not search past a git repository"
|
! nix build $i || fail "flake should not search past a git repository"
|
||||||
done
|
done
|
||||||
rm -rf .git
|
rm -rf .git
|
||||||
popd
|
popd
|
||||||
|
fi
|
|
@ -18,7 +18,12 @@ cat << EOF > flake.nix
|
||||||
with import ./config.nix;
|
with import ./config.nix;
|
||||||
mkDerivation {
|
mkDerivation {
|
||||||
name = "formatter";
|
name = "formatter";
|
||||||
buildCommand = "mkdir -p \$out/bin; cp \${./fmt.simple.sh} \$out/bin/formatter";
|
buildCommand = ''
|
||||||
|
mkdir -p \$out/bin
|
||||||
|
echo "#! ${shell}" > \$out/bin/formatter
|
||||||
|
cat \${./fmt.simple.sh} >> \$out/bin/formatter
|
||||||
|
chmod +x \$out/bin/formatter
|
||||||
|
'';
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
|
@@ -7,7 +7,7 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") {

 let

-# Generate a fake root CA and a fake github.com certificate.
+# Generate a fake root CA and a fake api.github.com / channels.nixos.org certificate.
 cert = pkgs.runCommand "cert" { buildInputs = [ pkgs.openssl ]; }
 ''
 mkdir -p $out
@@ -18,7 +18,7 @@ let

 openssl req -newkey rsa:2048 -nodes -keyout $out/server.key \
 -subj "/C=CN/ST=Denial/L=Springfield/O=Dis/CN=github.com" -out server.csr
-openssl x509 -req -extfile <(printf "subjectAltName=DNS:api.github.com,DNS:github.com,DNS:raw.githubusercontent.com") \
+openssl x509 -req -extfile <(printf "subjectAltName=DNS:api.github.com,DNS:channels.nixos.org") \
 -days 36500 -in server.csr -CA $out/ca.crt -CAkey ca.key -CAcreateserial -out $out/server.crt
 '';

@@ -67,7 +67,7 @@ makeTest (
 name = "github-flakes";

 nodes =
-{ # Impersonate github.com and api.github.com.
+{
 github =
 { config, pkgs, ... }:
 { networking.firewall.allowedTCPPorts = [ 80 443 ];
@@ -77,12 +77,12 @@ makeTest (
 services.httpd.extraConfig = ''
 ErrorLog syslog:local6
 '';
-services.httpd.virtualHosts."github.com" =
+services.httpd.virtualHosts."channels.nixos.org" =
 { forceSSL = true;
 sslServerKey = "${cert}/server.key";
 sslServerCert = "${cert}/server.crt";
 servedDirs =
-[ { urlPath = "/NixOS/flake-registry/raw/master";
+[ { urlPath = "/";
 dir = registry;
 }
 ];
@@ -103,13 +103,13 @@ makeTest (
 { config, lib, pkgs, nodes, ... }:
 { virtualisation.writableStore = true;
 virtualisation.diskSize = 2048;
-virtualisation.pathsInNixDB = [ pkgs.hello pkgs.fuse ];
+virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ];
 virtualisation.memorySize = 4096;
 nix.binaryCaches = lib.mkForce [ ];
 nix.extraOptions = "experimental-features = nix-command flakes";
 environment.systemPackages = [ pkgs.jq ];
 networking.hosts.${(builtins.head nodes.github.config.networking.interfaces.eth1.ipv4.addresses).address} =
-[ "github.com" "api.github.com" "raw.githubusercontent.com" ];
+[ "channels.nixos.org" "api.github.com" ];
 security.pki.certificateFiles = [ "${cert}/ca.crt" ];
 };
 };
@@ -123,7 +123,7 @@ makeTest (

 github.wait_for_unit("httpd.service")

-client.succeed("curl -v https://github.com/ >&2")
+client.succeed("curl -v https://api.github.com/ >&2")
 client.succeed("nix registry list | grep nixpkgs")

 rev = client.succeed("nix flake info nixpkgs --json | jq -r .revision")

@@ -5,6 +5,8 @@ export NIX_REMOTE=dummy://

 nix-instantiate --eval -E 'builtins.trace "Hello" 123' 2>&1 | grep -q Hello
 nix-instantiate --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1
+nix-instantiate --trace-verbose --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grep -q Hello
+(! nix-instantiate --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grep -q Hello)
 (! nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1 | grep -q Hello)
 nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" (throw "Foo")' 2>&1 | grep -q Hello

@@ -1,6 +1,12 @@
 nix_tests = \
-flakes.sh \
-flakes-run.sh \
+flakes/flakes.sh \
+flakes/run.sh \
+flakes/mercurial.sh \
+flakes/circular.sh \
+flakes/init.sh \
+flakes/follow-paths.sh \
+flakes/bundle.sh \
+flakes/check.sh \
 ca/gc.sh \
 gc.sh \
 remote-store.sh \
@@ -44,7 +50,7 @@ nix_tests = \
 secure-drv-outputs.sh \
 restricted.sh \
 fetchGitSubmodules.sh \
-flake-searching.sh \
+flakes/search-root.sh \
 ca/duplicate-realisation-in-closure.sh \
 readfile-context.sh \
 nix-channel.sh \
@@ -80,7 +86,7 @@ nix_tests = \
 nix-copy-ssh.sh \
 post-hook.sh \
 function-trace.sh \
-flake-local-settings.sh \
+flakes/config.sh \
 fmt.sh \
 eval-store.sh \
 why-depends.sh \
@@ -115,4 +121,8 @@ tests-environment = NIX_REMOTE= $(bash) -e

 clean-files += $(d)/common.sh $(d)/config.nix $(d)/ca/config.nix

-test-deps += tests/common.sh tests/config.nix tests/ca/config.nix tests/plugins/libplugintest.$(SO_EXT)
+test-deps += tests/common.sh tests/config.nix tests/ca/config.nix
+
+ifeq ($(BUILD_SHARED_LIBS), 1)
+test-deps += tests/plugins/libplugintest.$(SO_EXT)
+endif

@@ -14,7 +14,7 @@ makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; pkgD = pk
 { client =
 { config, lib, pkgs, ... }:
 { virtualisation.writableStore = true;
-virtualisation.pathsInNixDB = [ pkgA pkgD.drvPath ];
+virtualisation.additionalPaths = [ pkgA pkgD.drvPath ];
 nix.binaryCaches = lib.mkForce [ ];
 };

@@ -22,7 +22,7 @@ makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; pkgD = pk
 { config, pkgs, ... }:
 { services.openssh.enable = true;
 virtualisation.writableStore = true;
-virtualisation.pathsInNixDB = [ pkgB pkgC ];
+virtualisation.additionalPaths = [ pkgB pkgC ];
 };
 };

@@ -102,3 +102,11 @@ source <(nix print-dev-env -f "$shellDotNix" shellDrv)
 [[ ${arr2[1]} = $'\n' ]]
 [[ ${arr2[2]} = $'x\ny' ]]
 [[ $(fun) = blabla ]]
+
+# Test nix-shell with ellipsis and no `inNixShell` argument (for backwards compat with old nixpkgs)
+cat >$TEST_ROOT/shell-ellipsis.nix <<EOF
+{ system ? "x86_64-linux", ... }@args:
+assert (!(args ? inNixShell));
+(import $shellDotNix { }).shellDrv
+EOF
+nix-shell $TEST_ROOT/shell-ellipsis.nix --run "true"

@@ -5,6 +5,42 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") {
 extraConfigurations = [ { nixpkgs.overlays = [ overlay ]; } ];
 };

+let
+nix-fetch = pkgs.writeText "fetch.nix" ''
+derivation {
+# This derivation is an copy from what is available over at
+# nix.git:corepkgs/fetchurl.nix
+builder = "builtin:fetchurl";
+
+# We're going to fetch data from the http_dns instance created before
+# we expect the content to be the same as the content available there.
+# ```
+# $ nix-hash --type sha256 --to-base32 $(echo "hello world" | sha256sum | cut -d " " -f 1)
+# 0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59
+# ```
+outputHash = "0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59";
+outputHashAlgo = "sha256";
+outputHashMode = "flat";
+
+name = "example.com";
+url = "http://example.com";
+
+unpack = false;
+executable = false;
+
+system = "builtin";
+
+preferLocalBuild = true;
+
+impureEnvVars = [
+"http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
+];
+
+urls = [ "http://example.com" ];
+}
+'';
+in
+
 makeTest (

 rec {
@@ -68,40 +104,6 @@ rec {
 };
 };

-nix-fetch = pkgs.writeText "fetch.nix" ''
-derivation {
-# This derivation is an copy from what is available over at
-# nix.git:corepkgs/fetchurl.nix
-builder = "builtin:fetchurl";
-
-# We're going to fetch data from the http_dns instance created before
-# we expect the content to be the same as the content available there.
-# ```
-# $ nix-hash --type sha256 --to-base32 $(echo "hello world" | sha256sum | cut -d " " -f 1)
-# 0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59
-# ```
-outputHash = "0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59";
-outputHashAlgo = "sha256";
-outputHashMode = "flat";
-
-name = "example.com";
-url = "http://example.com";
-
-unpack = false;
-executable = false;
-
-system = "builtin";
-
-preferLocalBuild = true;
-
-impureEnvVars = [
-"http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
-];
-
-urls = [ "http://example.com" ];
-}
-'';
-
 testScript = { nodes, ... }: ''
 http_dns.wait_for_unit("nginx")
 http_dns.wait_for_open_port(80)

@@ -2,6 +2,11 @@ source common.sh

 set -o pipefail

+if [[ $BUILD_SHARED_LIBS != 1 ]]; then
+echo "plugins are not supported"
+exit 99
+fi
+
 res=$(nix --option setting-set true --option plugin-files $PWD/plugins/libplugintest* eval --expr builtins.anotherNull)

 [ "$res"x = "nullx" ]

@@ -30,3 +30,5 @@ nix eval --store dummy:// --write-to $TEST_ROOT/eval-out --expr '{ x = "foo" + "

 rm -rf $TEST_ROOT/eval-out
 (! nix eval --store dummy:// --write-to $TEST_ROOT/eval-out --expr '{ "." = "bla"; }')
+
+(! nix eval --expr '~/foo')

@@ -61,7 +61,7 @@ in
 }
 ];
 virtualisation.writableStore = true;
-virtualisation.pathsInNixDB = [ config.system.build.extraUtils ];
+virtualisation.additionalPaths = [ config.system.build.extraUtils ];
 nix.binaryCaches = lib.mkForce [ ];
 programs.ssh.extraConfig = "ConnectTimeout 30";
 };

@@ -42,6 +42,11 @@ testRepl () {
 echo "$replOutput"
 echo "$replOutput" | grep -qs "while evaluating the file" \
 || fail "nix repl --show-trace doesn't show the trace"
+
+nix repl "${nixArgs[@]}" --option pure-eval true 2>&1 <<< "builtins.currentSystem" \
+| grep "attribute 'currentSystem' missing"
+nix repl "${nixArgs[@]}" 2>&1 <<< "builtins.currentSystem" \
+| grep "$(nix-instantiate --eval -E 'builtins.currentSystem')"
 }

 # Simple test, try building a drv
@@ -50,15 +55,17 @@ testRepl
 testRepl --store "$TEST_ROOT/store?real=$NIX_STORE_DIR"

 testReplResponse () {
-local response="$(nix repl <<< "$1")"
-echo "$response" | grep -qs "$2" \
+local commands="$1"; shift
+local expectedResponse="$1"; shift
+local response="$(nix repl "$@" <<< "$commands")"
+echo "$response" | grep -qs "$expectedResponse" \
 || fail "repl command set:

-$1
+$commands

 does not respond with:

-$2
+$expectedResponse

 but with:

@@ -71,3 +78,48 @@ testReplResponse '
 :a { a = "2"; }
 "result: ${a}"
 ' "result: 2"
+
+testReplResponse '
+drvPath
+' '".*-simple.drv"' \
+$testDir/simple.nix
+
+testReplResponse '
+drvPath
+' '".*-simple.drv"' \
+--file $testDir/simple.nix --experimental-features 'ca-derivations'
+
+testReplResponse '
+drvPath
+' '".*-simple.drv"' \
+--file $testDir/simple.nix --extra-experimental-features 'repl-flake ca-derivations'
+
+mkdir -p flake && cat <<EOF > flake/flake.nix
+{
+outputs = { self }: {
+foo = 1;
+bar.baz = 2;
+
+changingThing = "beforeChange";
+};
+}
+EOF
+testReplResponse '
+foo + baz
+' "3" \
+./flake ./flake\#bar --experimental-features 'flakes repl-flake'
+
+# Test the `:reload` mechansim with flakes:
+# - Eval `./flake#changingThing`
+# - Modify the flake
+# - Re-eval it
+# - Check that the result has changed
+replResult=$( (
+echo "changingThing"
+sleep 1 # Leave the repl the time to eval 'foo'
+sed -i 's/beforeChange/afterChange/' flake/flake.nix
+echo ":reload"
+echo "changingThing"
+) | nix repl ./flake --experimental-features 'flakes repl-flake')
+echo "$replResult" | grep -qs beforeChange
+echo "$replResult" | grep -qs afterChange

@@ -28,11 +28,19 @@ nix search -f search.nix '' |grep -q hello

 e=$'\x1b' # grep doesn't support \e, \033 or even \x1b
 # Multiple overlapping regexes
-(( $(nix search -f search.nix '' 'oo' 'foo' 'oo' | grep "$e\[32;1mfoo$e\\[0;1m" | wc -l) == 1 ))
-(( $(nix search -f search.nix '' 'broken b' 'en bar' | grep "$e\[32;1mbroken bar$e\\[0m" | wc -l) == 1 ))
+(( $(nix search -f search.nix '' 'oo' 'foo' 'oo' | grep -c "$e\[32;1mfoo$e\\[0;1m") == 1 ))
+(( $(nix search -f search.nix '' 'broken b' 'en bar' | grep -c "$e\[32;1mbroken bar$e\\[0m") == 1 ))

 # Multiple matches
 # Searching for 'o' should yield the 'o' in 'broken bar', the 'oo' in foo and 'o' in hello
-(( $(nix search -f search.nix '' 'o' | grep -Eo "$e\[32;1mo{1,2}$e\[(0|0;1)m" | wc -l) == 3 ))
+(( $(nix search -f search.nix '' 'o' | grep -Eoc "$e\[32;1mo{1,2}$e\[(0|0;1)m") == 3 ))
 # Searching for 'b' should yield the 'b' in bar and the two 'b's in 'broken bar'
+# NOTE: This does not work with `grep -c` because it counts the two 'b's in 'broken bar' as one matched line
 (( $(nix search -f search.nix '' 'b' | grep -Eo "$e\[32;1mb$e\[(0|0;1)m" | wc -l) == 3 ))
+
+## Tests for --exclude
+(( $(nix search -f search.nix -e hello | grep -c hello) == 0 ))
+
+(( $(nix search -f search.nix foo --exclude 'foo|bar' | grep -Ec 'foo|bar') == 0 ))
+(( $(nix search -f search.nix foo -e foo --exclude bar | grep -Ec 'foo|bar') == 0 ))
+[[ $(nix search -f search.nix -e bar --json | jq -c 'keys') == '["foo","hello"]' ]]

@@ -10,12 +10,12 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") {
 makeTest {
 name = "setuid";

-machine =
+nodes.machine =
 { config, lib, pkgs, ... }:
 { virtualisation.writableStore = true;
 nix.binaryCaches = lib.mkForce [ ];
 nix.nixPath = [ "nixpkgs=${lib.cleanSource pkgs.path}" ];
-virtualisation.pathsInNixDB = [ pkgs.stdenv pkgs.pkgsi686Linux.stdenv ];
+virtualisation.additionalPaths = [ pkgs.stdenv pkgs.pkgsi686Linux.stdenv ];
 };

 testScript = { nodes }: ''

@@ -59,7 +59,7 @@ let
 echo 'ref: refs/heads/master' > $out/HEAD

 mkdir -p $out/info
-echo -e '${nixpkgs.rev}\trefs/heads/master' > $out/info/refs
+echo -e '${nixpkgs.rev}\trefs/heads/master\n${nixpkgs.rev}\trefs/tags/foo-bar' > $out/info/refs
 '';

 in
@@ -106,7 +106,7 @@ makeTest (
 {
 virtualisation.writableStore = true;
 virtualisation.diskSize = 2048;
-virtualisation.pathsInNixDB = [ pkgs.hello pkgs.fuse ];
+virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ];
 virtualisation.memorySize = 4096;
 nix.binaryCaches = lib.mkForce [ ];
 nix.extraOptions = ''
@@ -132,6 +132,17 @@ makeTest (
 client.succeed("curl -v https://git.sr.ht/ >&2")
 client.succeed("nix registry list | grep nixpkgs")

+# Test that it resolves HEAD
+rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs --json | jq -r .revision")
+assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"
+# Test that it resolves branches
+rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs/master --json | jq -r .revision")
+assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"
+# Test that it resolves tags
+rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs/foo-bar --json | jq -r .revision")
+assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"
+
+# Registry and pinning test
 rev = client.succeed("nix flake info nixpkgs --json | jq -r .revision")
 assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"