Mirror of https://github.com/privatevoid-net/nix-super.git (synced 2024-11-22 14:06:16 +02:00)

commit 98a120b8b8
Merge remote-tracking branch 'origin/master' into libgit2

197 changed files with 5187 additions and 3152 deletions

.github/workflows/backport.yml (vendored, 2 changes)

@@ -21,7 +21,7 @@ jobs:
          fetch-depth: 0
      - name: Create backport PRs
        # should be kept in sync with `version`
-       uses: zeebe-io/backport-action@v2.0.0
+       uses: zeebe-io/backport-action@v2.1.0
        with:
          # Config README: https://github.com/zeebe-io/backport-action#backport-action
          github_token: ${{ secrets.GITHUB_TOKEN }}

@@ -1,5 +1,8 @@
# Release X.Y (202?-??-??)

- The experimental `nix` command can now act as a [shebang interpreter](@docroot@/command-ref/new-cli/nix.md#shebang-interpreter)
  by appending the contents of any `#! nix` lines and the script's location to a single call.
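
  For example, such a script might look like this (a sketch; the `nixpkgs#bash` and `nixpkgs#hello` flake references are illustrative assumptions, not part of this commit):

  ```shell
  #!/usr/bin/env nix
  #! nix shell nixpkgs#bash nixpkgs#hello --command bash
  # Both `#! nix` lines above are folded into a single `nix shell` invocation.
  hello
  ```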

- [URL flake references](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references) now support [percent-encoded](https://datatracker.ietf.org/doc/html/rfc3986#section-2.1) characters.

- [Path-like flake references](@docroot@/command-ref/new-cli/nix3-flake.md#path-like-syntax) now accept arbitrary Unicode characters (except `#` and `?`).

@@ -14,8 +17,8 @@

- `nix-shell` shebang lines now support single-quoted arguments.
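
  For instance (a sketch; the quoted Python environment expression is an illustrative assumption):

  ```shell
  #!/usr/bin/env nix-shell
  #! nix-shell -i bash -p 'python3.withPackages (ps: [ ps.numpy ])'
  # The single-quoted expression is now parsed as one argument to -p.
  python3 -c 'import numpy; print(numpy.__version__)'
  ```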

-- `builtins.fetchTree` is now marked as stable.
+- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/contributing/experimental-features.md#xp-fetch-tree).
+  As described in the document for that feature, this is because we anticipate polishing it and then stabilizing it before the rest of Flakes.

- The interface for creating and updating lock files has been overhauled:

@@ -29,3 +32,44 @@

- The flake-specific flags `--recreate-lock-file` and `--update-input` have been removed from all commands operating on installables.
  They are superseded by `nix flake update`.
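
  For example (a sketch of the equivalent new invocations):

  ```shell
  # Instead of passing --recreate-lock-file, recreate the whole lock file:
  nix flake update
  # Instead of passing --update-input nixpkgs, update a single input:
  nix flake update nixpkgs
  ```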

- Commit signature verification for [`builtins.fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit) is added as the new [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).

- [`nix path-info --json`](@docroot@/command-ref/new-cli/nix3-path-info.md)
  (experimental) now returns a JSON map rather than a JSON list.
  The `path` field of each object has instead become the key in the outer map, since it is unique.
  The `valid` field also goes away; invalid paths are now represented by `null` instead.

  - Old way:

    ```json5
    [
      {
        "path": "/nix/store/8fv91097mbh5049i9rglc73dx6kjg3qk-bash-5.2-p15",
        "valid": true,
        // ...
      },
      {
        "path": "/nix/store/wffw7l0alvs3iw94cbgi1gmmbmw99sqb-home-manager-path",
        "valid": false
      }
    ]
    ```

  - New way:

    ```json5
    {
      "/nix/store/8fv91097mbh5049i9rglc73dx6kjg3qk-bash-5.2-p15": {
        // ...
      },
      "/nix/store/wffw7l0alvs3iw94cbgi1gmmbmw99sqb-home-manager-path": null,
    }
    ```

  This makes it match `nix derivation show`, which also maps store paths to information.
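
  The new format lets you, for instance, list the returned store paths directly (an illustrative invocation; any locally available installable works):

  ```shell
  nix path-info --json nixpkgs#hello | jq 'keys'
  ```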

- When Nix is installed using the [binary installer](@docroot@/installation/installing-binary.md), in supported shells (Bash, Zsh, Fish)
  [`XDG_DATA_DIRS`](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html#variables) is now populated with the path to the `/share` subdirectory of the current profile.
  This means that command completion scripts, `.desktop` files, and similar artifacts installed via [`nix-env`](@docroot@/command-ref/nix-env.md) or [`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md)
  (experimental) can be found by any program that follows the [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html).
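
  After sourcing the updated profile script you can verify the result (a sketch; the `alice` home directory is hypothetical):

  ```shell
  echo "$XDG_DATA_DIRS"
  # e.g. /usr/local/share:/usr/share:/home/alice/.nix-profile/share:/nix/var/nix/profiles/default/share
  ```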

@@ -185,6 +185,7 @@
      buildPackages.git
      buildPackages.mercurial # FIXME: remove? only needed for tests
      buildPackages.jq # Also for custom mdBook preprocessor.
      buildPackages.openssh # only needed for tests (ssh-keygen)
    ]
    ++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)];

@@ -2,7 +2,7 @@

## Motivation

-The team's main responsibility is to set a direction for the development of Nix and ensure that the code is in good shape.
+The team's main responsibility is to guide and direct the development of Nix and ensure that the code is in good shape.

We aim to achieve this by improving the contributor experience and attracting more maintainers – that is, by helping other people contributing to Nix and eventually taking responsibility – in order to scale the development process to match users' needs.

@@ -11,7 +11,6 @@
#include "derivations.hh"
#include "globals.hh"
#include "store-api.hh"
#include "util.hh"
#include "crypto.hh"

#include <sodium.h>

@@ -19,6 +19,14 @@ set __ETC_PROFILE_NIX_SOURCED 1

set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"

# Populate bash completions, .desktop files, etc
if test -z "$XDG_DATA_DIRS"
    # According to XDG spec the default is /usr/local/share:/usr/share, don't set something that prevents that default
    set --export XDG_DATA_DIRS "/usr/local/share:/usr/share:/nix/var/nix/profiles/default/share"
else
    set --export XDG_DATA_DIRS "$XDG_DATA_DIRS:/nix/var/nix/profiles/default/share"
end

# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
if test -n "$NIX_SSH_CERT_FILE"
    : # Allow users to override the NIX_SSL_CERT_FILE

@@ -30,6 +30,14 @@ fi

export NIX_PROFILES="@localstatedir@/nix/profiles/default $NIX_LINK"

# Populate bash completions, .desktop files, etc
if [ -z "$XDG_DATA_DIRS" ]; then
    # According to XDG spec the default is /usr/local/share:/usr/share, don't set something that prevents that default
    export XDG_DATA_DIRS="/usr/local/share:/usr/share:$NIX_LINK/share:/nix/var/nix/profiles/default/share"
else
    export XDG_DATA_DIRS="$XDG_DATA_DIRS:$NIX_LINK/share:/nix/var/nix/profiles/default/share"
fi

# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
if [ -n "${NIX_SSL_CERT_FILE:-}" ]; then
    : # Allow users to override the NIX_SSL_CERT_FILE
@ -20,6 +20,14 @@ if test -n "$HOME" && test -n "$USER"
|
|||
# This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix
|
||||
set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"
|
||||
|
||||
# Populate bash completions, .desktop files, etc
|
||||
if test -z "$XDG_DATA_DIRS"
|
||||
# According to XDG spec the default is /usr/local/share:/usr/share, don't set something that prevents that default
|
||||
set --export XDG_DATA_DIRS "/usr/local/share:/usr/share:$NIX_LINK/share:/nix/var/nix/profiles/default/share"
|
||||
else
|
||||
set --export XDG_DATA_DIRS "$XDG_DATA_DIRS:$NIX_LINK/share:/nix/var/nix/profiles/default/share"
|
||||
end
|
||||
|
||||
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
|
||||
if test -n "$NIX_SSH_CERT_FILE"
|
||||
: # Allow users to override the NIX_SSL_CERT_FILE
|
||||
|
|
|
@ -32,6 +32,14 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then
|
|||
# This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix
|
||||
export NIX_PROFILES="@localstatedir@/nix/profiles/default $NIX_LINK"
|
||||
|
||||
# Populate bash completions, .desktop files, etc
|
||||
if [ -z "$XDG_DATA_DIRS" ]; then
|
||||
# According to XDG spec the default is /usr/local/share:/usr/share, don't set something that prevents that default
|
||||
export XDG_DATA_DIRS="/usr/local/share:/usr/share:$NIX_LINK/share:/nix/var/nix/profiles/default/share"
|
||||
else
|
||||
export XDG_DATA_DIRS="$XDG_DATA_DIRS:$NIX_LINK/share:/nix/var/nix/profiles/default/share"
|
||||
fi
|
||||
|
||||
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
|
||||
if [ -e /etc/ssl/certs/ca-certificates.crt ]; then # NixOS, Ubuntu, Debian, Gentoo, Arch
|
||||
export NIX_SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
|
||||
|
|
|

@@ -1,3 +1,6 @@
#pragma once
///@file

#include "derived-path.hh"
#include "realisation.hh"

@@ -175,7 +175,7 @@ void BuiltPathsCommand::run(ref<Store> store, Installables && installables)
            throw UsageError("'--all' does not expect arguments");
        // XXX: Only uses opaque paths, ignores all the realisations
        for (auto & p : store->queryAllValidPaths())
-           paths.push_back(BuiltPath::Opaque{p});
+           paths.emplace_back(BuiltPath::Opaque{p});
    } else {
        paths = Installable::toBuiltPaths(getEvalStore(), store, realiseMode, operateOn, installables);
        if (recursive) {

@@ -188,7 +188,7 @@ void BuiltPathsCommand::run(ref<Store> store, Installables && installables)
        }
        store->computeFSClosure(pathsRoots, pathsClosure);
        for (auto & path : pathsClosure)
-           paths.push_back(BuiltPath::Opaque{path});
+           paths.emplace_back(BuiltPath::Opaque{path});
    }
}

@@ -2,7 +2,6 @@
#include "common-eval-args.hh"
#include "shared.hh"
#include "filetransfer.hh"
#include "util.hh"
#include "eval.hh"
#include "fetchers.hh"
#include "registry.hh"

@@ -165,7 +164,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
    return res.finish();
}

-SourcePath lookupFileArg(EvalState & state, std::string_view s)
+SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir)
{
    if (EvalSettings::isPseudoUrl(s)) {
        auto storePath = fetchers::downloadTarball(

@@ -186,7 +185,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s)
    }

    else
-       return state.rootPath(CanonPath::fromCwd(s));
+       return state.rootPath(CanonPath(s, baseDir));
}

}

@@ -2,6 +2,7 @@
///@file

#include "args.hh"
#include "canon-path.hh"
#include "common-args.hh"
#include "search-path.hh"

@@ -28,6 +29,6 @@ private:
    std::map<std::string, std::string> autoArgs;
};

-SourcePath lookupFileArg(EvalState & state, std::string_view s);
+SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir = CanonPath::fromCwd());

}

@@ -1,5 +1,5 @@
#include "util.hh"
#include "editor-for.hh"
#include "environment-variables.hh"

namespace nix {

@@ -4,7 +4,6 @@
#include "globals.hh"
#include "installable-value.hh"
#include "outputs-spec.hh"
#include "util.hh"
#include "command.hh"
#include "attr-path.hh"
#include "common-eval-args.hh"

@@ -4,6 +4,7 @@
#include "installable-attr-path.hh"
#include "installable-flake.hh"
#include "outputs-spec.hh"
#include "users.hh"
#include "util.hh"
#include "command.hh"
#include "attr-path.hh"

@@ -87,7 +88,7 @@ MixFlakeOptions::MixFlakeOptions()
            lockFlags.writeLockFile = false;
            lockFlags.inputOverrides.insert_or_assign(
                flake::parseInputPath(inputPath),
-               parseFlakeRef(flakeRef, absPath("."), true));
+               parseFlakeRef(flakeRef, absPath(getCommandBaseDir()), true));
        }},
        .completer = {[&](AddCompletions & completions, size_t n, std::string_view prefix) {
            if (n == 0) {

@@ -129,7 +130,7 @@ MixFlakeOptions::MixFlakeOptions()
            auto evalState = getEvalState();
            auto flake = flake::lockFlake(
                *evalState,
-               parseFlakeRef(flakeRef, absPath(".")),
+               parseFlakeRef(flakeRef, absPath(getCommandBaseDir())),
                { .writeLockFile = false });
            for (auto & [inputName, input] : flake.lockFile.root->inputs) {
                auto input2 = flake.lockFile.findInput({inputName}); // resolve 'follows' nodes

@@ -293,6 +294,8 @@ void completeFlakeRefWithFragment(
        prefixRoot = ".";
    }
    auto flakeRefS = std::string(prefix.substr(0, hash));

    // TODO: ideally this would use the command base directory instead of assuming ".".
    auto flakeRef = parseFlakeRef(expandTilde(flakeRefS), absPath("."));

    auto evalCache = openEvalCache(*evalState,

@@ -441,10 +444,12 @@ Installables SourceExprCommand::parseInstallables(
        auto e = state->parseStdin();
        state->eval(e, *vFile);
    }
-   else if (file)
-       state->evalFile(lookupFileArg(*state, *file), *vFile);
+   else if (file) {
+       state->evalFile(lookupFileArg(*state, *file, CanonPath::fromCwd(getCommandBaseDir())), *vFile);
+   }
    else {
-       auto e = state->parseExprFromString(*expr, state->rootPath(CanonPath::fromCwd()));
+       CanonPath dir(CanonPath::fromCwd(getCommandBaseDir()));
+       auto e = state->parseExprFromString(*expr, state->rootPath(dir));
        state->eval(e, *vFile);
    }

@@ -479,7 +484,7 @@ Installables SourceExprCommand::parseInstallables(
            }

            try {
-               auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, absPath("."));
+               auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, absPath(getCommandBaseDir()));
                result.push_back(make_ref<InstallableFlake>(
                    this,
                    getEvalState(),

@@ -663,7 +668,7 @@ BuiltPaths Installable::toBuiltPaths(

    BuiltPaths res;
    for (auto & drvPath : Installable::toDerivations(store, installables, true))
-       res.push_back(BuiltPath::Opaque{drvPath});
+       res.emplace_back(BuiltPath::Opaque{drvPath});
    return res;
}
}

@@ -753,7 +758,7 @@ std::vector<FlakeRef> RawInstallablesCommand::getFlakeRefsForCompletion()
    for (auto i : rawInstallables)
        res.push_back(parseFlakeRefWithFragment(
            expandTilde(i),
-           absPath(".")).first);
+           absPath(getCommandBaseDir())).first);
    return res;
}

@@ -775,7 +780,7 @@ std::vector<FlakeRef> InstallableCommand::getFlakeRefsForCompletion()
    return {
        parseFlakeRefWithFragment(
            expandTilde(_installable),
-           absPath(".")).first
+           absPath(getCommandBaseDir())).first
    };
}

@@ -1,7 +1,6 @@
#pragma once
///@file

#include "util.hh"
#include "path.hh"
#include "outputs-spec.hh"
#include "derived-path.hh"

@@ -1,6 +1,7 @@
#include "markdown.hh"
#include "util.hh"
#include "finally.hh"
#include "terminal.hh"

#include <sys/queue.h>
#include <lowdown.h>
@ -22,6 +22,7 @@ extern "C" {
|
|||
#include "repl.hh"
|
||||
|
||||
#include "ansicolor.hh"
|
||||
#include "signals.hh"
|
||||
#include "shared.hh"
|
||||
#include "eval.hh"
|
||||
#include "eval-cache.hh"
|
||||
|
@ -36,6 +37,8 @@ extern "C" {
|
|||
#include "globals.hh"
|
||||
#include "flake/flake.hh"
|
||||
#include "flake/lockfile.hh"
|
||||
#include "users.hh"
|
||||
#include "terminal.hh"
|
||||
#include "editor-for.hh"
|
||||
#include "finally.hh"
|
||||
#include "markdown.hh"
|
||||
|
|
|

@@ -1,6 +1,5 @@
#include "attr-path.hh"
#include "eval-inline.hh"
#include "util.hh"

namespace nix {

@@ -1,3 +1,4 @@
#include "users.hh"
#include "eval-cache.hh"
#include "sqlite.hh"
#include "eval.hh"

@@ -1,3 +1,4 @@
#include "users.hh"
#include "globals.hh"
#include "profiles.hh"
#include "eval.hh"

@@ -1,4 +1,6 @@
#pragma once
///@file

#include "config.hh"

namespace nix {

@@ -14,6 +14,7 @@
#include "print.hh"
#include "fs-input-accessor.hh"
#include "memory-input-accessor.hh"
#include "signals.hh"

#include <algorithm>
#include <chrono>

@@ -1,6 +1,7 @@
#include "flake.hh"
#include "users.hh"
#include "globals.hh"
#include "fetch-settings.hh"
#include "flake.hh"

#include <nlohmann/json.hpp>

@@ -1,3 +1,4 @@
#include "terminal.hh"
#include "flake.hh"
#include "eval.hh"
#include "eval-settings.hh"

@@ -8,6 +9,7 @@
#include "fetchers.hh"
#include "finally.hh"
#include "fetch-settings.hh"
#include "value-to-json.hh"

namespace nix {
@ -140,6 +142,11 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
attrs.emplace(state.symbols[attr.name], (long unsigned int)attr.value->integer);
|
||||
break;
|
||||
default:
|
||||
if (attr.name == state.symbols.create("publicKeys")) {
|
||||
experimentalFeatureSettings.require(Xp::VerifiedFetches);
|
||||
NixStringContext emptyContext = {};
|
||||
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, emptyContext).dump());
|
||||
} else
|
||||
throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
|
||||
state.symbols[attr.name], showType(*attr.value));
|
||||
}
|
||||
|
|
|

@@ -1,5 +1,4 @@
#include "get-drvs.hh"
#include "util.hh"
#include "eval-inline.hh"
#include "derivations.hh"
#include "store-api.hh"

@@ -19,6 +19,7 @@
#include <variant>

#include "util.hh"
#include "users.hh"

#include "nixexpr.hh"
#include "eval.hh"

@@ -10,6 +10,7 @@
#include "path-references.hh"
#include "store-api.hh"
#include "util.hh"
#include "processes.hh"
#include "value-to-json.hh"
#include "value-to-xml.hh"
#include "primops.hh"

@@ -824,7 +825,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
        auto message = state.coerceToString(pos, *args[0], context,
            "while evaluating the error message passed to builtins.addErrorContext",
            false, false).toOwned();
-       e.addTrace(nullptr, message, true);
+       e.addTrace(nullptr, hintfmt(message), true);
        throw;
    }
}

@@ -7,6 +7,7 @@
#include "registry.hh"
#include "tarball.hh"
#include "url.hh"
#include "value-to-json.hh"

#include <ctime>
#include <iomanip>

@@ -125,6 +126,10 @@ static void fetchTree(
            attrs.emplace(state.symbols[attr.name], Explicit<bool>{attr.value->boolean});
        else if (attr.value->type() == nInt)
            attrs.emplace(state.symbols[attr.name], uint64_t(attr.value->integer));
        else if (state.symbols[attr.name] == "publicKeys") {
            experimentalFeatureSettings.require(Xp::VerifiedFetches);
            attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump());
        }
        else
            state.debugThrowLastTrace(TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
                state.symbols[attr.name], showType(*attr.value)));

@@ -223,6 +228,7 @@ static RegisterPrimOp primop_fetchTree({
        ```
    )",
    .fun = prim_fetchTree,
    .experimentalFeature = Xp::FetchTree,
});

static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v,

@@ -427,6 +433,42 @@ static RegisterPrimOp primop_fetchGit({
        With this argument being true, it's possible to load a `rev` from *any* `ref`
        (by default only `rev`s from the specified `ref` are supported).

      - `verifyCommit` (default: `true` if `publicKey` or `publicKeys` are provided, otherwise `false`)

        Whether to check `rev` for a signature matching `publicKey` or `publicKeys`.
        If `verifyCommit` is enabled, then `fetchGit` cannot use a local repository with uncommitted changes.
        Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).

      - `publicKey`

        The public key against which `rev` is verified if `verifyCommit` is enabled.
        Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).

      - `keytype` (default: `"ssh-ed25519"`)

        The key type of `publicKey`.
        Possible values:
        - `"ssh-dsa"`
        - `"ssh-ecdsa"`
        - `"ssh-ecdsa-sk"`
        - `"ssh-ed25519"`
        - `"ssh-ed25519-sk"`
        - `"ssh-rsa"`

        Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).

      - `publicKeys`

        The public keys against which `rev` is verified if `verifyCommit` is enabled.
        Must be given as a list of attribute sets with the following form:

        ```nix
        {
          key = "<public key>";
          type = "<key type>"; # optional, default: "ssh-ed25519"
        }
        ```

        Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).

      Here are some examples of how to use `fetchGit`.

      - To fetch a private repository over SSH:

@@ -501,6 +543,21 @@ static RegisterPrimOp primop_fetchGit({
        }
        ```

      - To verify the commit signature:

        ```nix
        builtins.fetchGit {
          url = "ssh://git@github.com/nixos/nix.git";
          verifyCommit = true;
          publicKeys = [
            {
              type = "ssh-ed25519";
              key = "AAAAC3NzaC1lZDI1NTE5AAAAIArPKULJOid8eS6XETwUjO48/HKBWl7FTCK0Z//fplDi";
            }
          ];
        }
        ```

      Nix will refetch the branch according to the [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl) setting.

      This behavior is disabled in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval).

@@ -1,5 +1,4 @@
#include "search-path.hh"
#include "util.hh"

namespace nix {

@@ -1,7 +1,7 @@
#include "value-to-json.hh"
#include "eval-inline.hh"
#include "util.hh"
#include "store-api.hh"
#include "signals.hh"

#include <cstdlib>
#include <iomanip>

@@ -1,7 +1,7 @@
#include "value-to-xml.hh"
#include "xml-writer.hh"
#include "eval-inline.hh"
#include "util.hh"
#include "signals.hh"

#include <cstdlib>

@@ -1,3 +1,4 @@
#include "util.hh"
#include "value/context.hh"

#include <optional>

@@ -1,7 +1,6 @@
#pragma once
///@file

#include "util.hh"
#include "comparator.hh"
#include "derived-path.hh"
#include "variant-wrapper.hh"

@@ -1,4 +1,5 @@
#include "cache.hh"
#include "users.hh"
#include "sqlite.hh"
#include "sync.hh"
#include "store-api.hh"

@@ -3,7 +3,6 @@

#include "types.hh"
#include "config.hh"
#include "util.hh"

#include <map>
#include <limits>
@ -373,4 +373,9 @@ std::optional<ExperimentalFeature> InputScheme::experimentalFeature() const
|
|||
return {};
|
||||
}
|
||||
|
||||
std::string publicKeys_to_string(const std::vector<PublicKey>& publicKeys)
|
||||
{
|
||||
return ((nlohmann::json) publicKeys).dump();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|

@@ -184,4 +184,13 @@ void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);

nlohmann::json dumpRegisterInputSchemeInfo();

struct PublicKey
{
    std::string type = "ssh-ed25519";
    std::string key;
};
NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE_WITH_DEFAULT(PublicKey, type, key)

std::string publicKeys_to_string(const std::vector<PublicKey>&);

}

@@ -2,6 +2,7 @@
#include "input-accessor.hh"
#include "cache.hh"
#include "finally.hh"
#include "processes.hh"

#include <boost/core/span.hpp>

@@ -21,6 +22,7 @@

#include <unordered_set>
#include <queue>
#include <regex>

namespace std {

@@ -365,6 +367,64 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
            refspec
        }, {}, true);
    }

    void verifyCommit(
        const Hash & rev,
        const std::vector<fetchers::PublicKey> & publicKeys) override
    {
        // Create ad-hoc allowedSignersFile and populate it with publicKeys
        auto allowedSignersFile = createTempFile().second;
        std::string allowedSigners;
        for (const fetchers::PublicKey & k : publicKeys) {
            if (k.type != "ssh-dsa"
                && k.type != "ssh-ecdsa"
                && k.type != "ssh-ecdsa-sk"
                && k.type != "ssh-ed25519"
                && k.type != "ssh-ed25519-sk"
                && k.type != "ssh-rsa")
                throw Error("Unknown key type '%s'.\n"
                    "Please use one of\n"
                    "- ssh-dsa\n"
                    " ssh-ecdsa\n"
                    " ssh-ecdsa-sk\n"
                    " ssh-ed25519\n"
                    " ssh-ed25519-sk\n"
                    " ssh-rsa", k.type);
            allowedSigners += "* " + k.type + " " + k.key + "\n";
        }
        writeFile(allowedSignersFile, allowedSigners);

        // Run verification command
        auto [status, output] = runProgram(RunOptions {
            .program = "git",
            .args = {
                "-c",
                "gpg.ssh.allowedSignersFile=" + allowedSignersFile,
                "-C", path.abs(),
                "verify-commit",
                rev.gitRev()
            },
            .mergeStderrToStdout = true,
        });

        /* Evaluate result through status code and checking if public
           key fingerprints appear on stderr. This is necessary
           because the git command might also succeed due to the
           commit being signed by gpg keys that are present in the
           user's key agent. */
        std::string re = R"(Good "git" signature for \* with .* key SHA256:[)";
        for (const fetchers::PublicKey & k : publicKeys){
            // Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally
            auto fingerprint = trim(hashString(htSHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
            auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" );
            re += "(" + escaped_fingerprint + ")";
        }
        re += "]";
        if (status == 0 && std::regex_search(output, std::regex(re)))
            printTalkative("Signature verification on commit %s succeeded.", rev.gitRev());
        else
            throw Error("Commit signature verification on commit %s failed: %s", rev.gitRev(), output);
    }
};

ref<GitRepo> GitRepo::openRepo(const CanonPath & path, bool create, bool bare)

@@ -4,6 +4,8 @@

namespace nix {

namespace fetchers { struct PublicKey; }

struct GitRepo
{
    virtual ~GitRepo()

@@ -72,6 +74,14 @@ struct GitRepo
    virtual void fetch(
        const std::string & url,
        const std::string & refspec) = 0;

    /**
     * Verify that commit `rev` is signed by one of the keys in
     * `publicKeys`. Throw an error if it isn't.
     */
    virtual void verifyCommit(
        const Hash & rev,
        const std::vector<fetchers::PublicKey> & publicKeys) = 0;
};

}

@@ -1,11 +1,12 @@
#include "fetchers.hh"
#include "users.hh"
#include "cache.hh"
#include "globals.hh"
#include "tarfile.hh"
#include "store-api.hh"
#include "url-parts.hh"
#include "pathlocks.hh"
#include "util.hh"
#include "processes.hh"
#include "git.hh"
#include "fs-input-accessor.hh"
#include "union-input-accessor.hh"
@ -135,6 +136,19 @@ std::optional<std::string> readHeadCached(const std::string & actualUrl)
|
|||
return std::nullopt;
|
||||
}
|
||||
|
||||
std::vector<PublicKey> getPublicKeys(const Attrs & attrs)
|
||||
{
|
||||
std::vector<PublicKey> publicKeys;
|
||||
if (attrs.contains("publicKeys")) {
|
||||
nlohmann::json publicKeysJson = nlohmann::json::parse(getStrAttr(attrs, "publicKeys"));
|
||||
ensureType(publicKeysJson, nlohmann::json::value_t::array);
|
||||
publicKeys = publicKeysJson.get<std::vector<PublicKey>>();
|
||||
}
|
||||
if (attrs.contains("publicKey"))
|
||||
publicKeys.push_back(PublicKey{maybeGetStrAttr(attrs, "keytype").value_or("ssh-ed25519"),getStrAttr(attrs, "publicKey")});
|
||||
return publicKeys;
|
||||
}
|
||||
|
||||
} // end namespace
|
||||
|
||||
struct GitInputScheme : InputScheme
|
||||
|

@@ -155,9 +169,9 @@ struct GitInputScheme : InputScheme
        attrs.emplace("type", "git");

        for (auto & [name, value] : url.query) {
-           if (name == "rev" || name == "ref")
+           if (name == "rev" || name == "ref" || name == "keytype" || name == "publicKey" || name == "publicKeys")
                attrs.emplace(name, value);
-           else if (name == "shallow" || name == "submodules" || name == "allRefs")
+           else if (name == "shallow" || name == "submodules" || name == "allRefs" || name == "verifyCommit")
                attrs.emplace(name, Explicit<bool> { value == "1" });
            else
                url2.query.emplace(name, value);

@@ -189,14 +203,26 @@ struct GitInputScheme : InputScheme
            "name",
            "dirtyRev",
            "dirtyShortRev",
            "verifyCommit",
            "keytype",
            "publicKey",
            "publicKeys",
        };
    }

    std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
    {
        for (auto & [name, _] : attrs)
            if (name == "verifyCommit"
                || name == "keytype"
                || name == "publicKey"
                || name == "publicKeys")
                experimentalFeatureSettings.require(Xp::VerifiedFetches);

        maybeGetBoolAttr(attrs, "shallow");
        maybeGetBoolAttr(attrs, "submodules");
        maybeGetBoolAttr(attrs, "allRefs");
        maybeGetBoolAttr(attrs, "verifyCommit");

        if (auto ref = maybeGetStrAttr(attrs, "ref")) {
            if (std::regex_search(*ref, badGitRefRegex))

@@ -219,6 +245,15 @@ struct GitInputScheme : InputScheme
        if (auto ref = input.getRef()) url.query.insert_or_assign("ref", *ref);
        if (maybeGetBoolAttr(input.attrs, "shallow").value_or(false))
            url.query.insert_or_assign("shallow", "1");
        if (maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(false))
            url.query.insert_or_assign("verifyCommit", "1");
        auto publicKeys = getPublicKeys(input.attrs);
        if (publicKeys.size() == 1) {
            url.query.insert_or_assign("keytype", publicKeys.at(0).type);
            url.query.insert_or_assign("publicKey", publicKeys.at(0).key);
        }
        else if (publicKeys.size() > 1)
            url.query.insert_or_assign("publicKeys", publicKeys_to_string(publicKeys));
        return url;
    }

@@ -416,6 +451,19 @@ struct GitInputScheme : InputScheme
        };
    }

    void verifyCommit(const Input & input, std::shared_ptr<GitRepo> repo) const
    {
        auto publicKeys = getPublicKeys(input.attrs);
        auto verifyCommit = maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(!publicKeys.empty());

        if (verifyCommit) {
            if (input.getRev() && repo)
                repo->verifyCommit(*input.getRev(), publicKeys);
            else
                throw Error("commit verification is required for Git repository '%s', but it's dirty", input.to_string());
        }
    }

    std::pair<ref<InputAccessor>, Input> getAccessorFromCommit(
        ref<Store> store,
        RepoInfo & repoInfo,

@@ -513,7 +561,9 @@ struct GitInputScheme : InputScheme
            // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
        }

-       auto isShallow = GitRepo::openRepo(CanonPath(repoDir))->isShallow();
+       auto repo = GitRepo::openRepo(CanonPath(repoDir));
+
+       auto isShallow = repo->isShallow();

        if (isShallow && !repoInfo.shallow)
            throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified", repoInfo.url);

@@ -533,7 +583,7 @@ struct GitInputScheme : InputScheme

        printTalkative("using revision %s of repo '%s'", rev.gitRev(), repoInfo.url);

-       auto repo = GitRepo::openRepo(CanonPath(repoDir));
+       verifyCommit(input, repo);

        auto accessor = repo->getAccessor(rev);

@@ -565,8 +615,7 @@ struct GitInputScheme : InputScheme
            }
        }

-       assert(input.getRev());
-       assert(!origRev || origRev == input.getRev());
+       assert(!origRev || origRev == rev);
        if (!repoInfo.shallow)
            input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
        input.attrs.insert_or_assign("lastModified", getIntAttr(infoAttrs, "lastModified"));

@@ -615,14 +664,17 @@ struct GitInputScheme : InputScheme
        }

        if (!repoInfo.workdirInfo.isDirty) {
-           if (auto ref = GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirRef())
+           auto repo = GitRepo::openRepo(CanonPath(repoInfo.url));
+
+           if (auto ref = repo->getWorkdirRef())
                input.attrs.insert_or_assign("ref", *ref);

            auto rev = repoInfo.workdirInfo.headRev.value();

            input.attrs.insert_or_assign("rev", rev.gitRev());

            input.attrs.insert_or_assign("revCount", getRevCount(repoInfo, repoInfo.url, rev));

+           verifyCommit(input, repo);
        } else {
            repoInfo.warnDirty();

@@ -632,6 +684,8 @@ struct GitInputScheme : InputScheme
            input.attrs.insert_or_assign("dirtyShortRev",
                repoInfo.workdirInfo.headRev->gitShortRev() + "-dirty");
        }

        verifyCommit(input, nullptr);
    }

    input.attrs.insert_or_assign(

@@ -651,10 +705,10 @@ struct GitInputScheme : InputScheme

        auto repoInfo = getRepoInfo(input);

-       if (input.getRef() || input.getRev() || !repoInfo.isLocal)
-           return getAccessorFromCommit(store, repoInfo, std::move(input));
-       else
-           return getAccessorFromWorkdir(store, repoInfo, std::move(input));
+       return
+           input.getRef() || input.getRev() || !repoInfo.isLocal
+           ? getAccessorFromCommit(store, repoInfo, std::move(input))
+           : getAccessorFromWorkdir(store, repoInfo, std::move(input));
    }
};

@@ -1,8 +1,10 @@
#pragma once
///@file

#include "source-accessor.hh"
#include "ref.hh"
#include "types.hh"
#include "file-system.hh"
#include "repair-flag.hh"
#include "content-address.hh"

@@ -14,7 +16,7 @@ struct SourcePath;
class StorePath;
class Store;

-struct InputAccessor : SourceAccessor, std::enable_shared_from_this<InputAccessor>
+struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this<InputAccessor>
{
    /**
     * Return the maximum last-modified time of the files in this

@@ -1,48 +1,16 @@
#include "memory-input-accessor.hh"
+#include "memory-source-accessor.hh"

namespace nix {

-struct MemoryInputAccessorImpl : MemoryInputAccessor
+struct MemoryInputAccessorImpl : MemoryInputAccessor, MemorySourceAccessor
{
-   std::map<CanonPath, std::string> files;
-
-   std::string readFile(const CanonPath & path) override
-   {
-       auto i = files.find(path);
-       if (i == files.end())
-           throw Error("file '%s' does not exist", path);
-       return i->second;
-   }
-
-   bool pathExists(const CanonPath & path) override
-   {
-       auto i = files.find(path);
-       return i != files.end();
-   }
-
-   std::optional<Stat> maybeLstat(const CanonPath & path) override
-   {
-       auto i = files.find(path);
-       if (i != files.end())
-           return Stat { .type = tRegular, .isExecutable = false };
-       return std::nullopt;
-   }
-
-   DirEntries readDirectory(const CanonPath & path) override
-   {
-       return {};
-   }
-
-   std::string readLink(const CanonPath & path) override
-   {
-       throw UnimplementedError("MemoryInputAccessor::readLink");
-   }
-
    SourcePath addFile(CanonPath path, std::string && contents) override
    {
-       files.emplace(path, std::move(contents));
-
-       return {ref(shared_from_this()), std::move(path)};
+       return {
+           ref(shared_from_this()),
+           MemorySourceAccessor::addFile(path, std::move(contents))
+       };
    }
};

@@ -1,4 +1,6 @@
#include "fetchers.hh"
#include "processes.hh"
#include "users.hh"
#include "cache.hh"
#include "globals.hh"
#include "tarfile.hh"

@@ -1,6 +1,6 @@
#include "registry.hh"
#include "tarball.hh"
#include "util.hh"
#include "users.hh"
#include "globals.hh"
#include "store-api.hh"
#include "local-fs-store.hh"

@@ -1,7 +1,9 @@
#include "common-args.hh"
#include "args/root.hh"
#include "globals.hh"
#include "logging.hh"
#include "loggers.hh"
#include "util.hh"

namespace nix {

@@ -1,6 +1,6 @@
#include "loggers.hh"
#include "environment-variables.hh"
#include "progress-bar.hh"
#include "util.hh"

namespace nix {

@@ -1,5 +1,5 @@
#include "progress-bar.hh"
#include "util.hh"
#include "terminal.hh"
#include "sync.hh"
#include "store-api.hh"
#include "names.hh"

@@ -1,10 +1,11 @@
#include "globals.hh"
#include "current-process.hh"
#include "shared.hh"
#include "store-api.hh"
#include "gc-store.hh"
#include "util.hh"
#include "loggers.hh"
#include "progress-bar.hh"
#include "signals.hh"

#include <algorithm>
#include <cctype>

@@ -1,7 +1,7 @@
#pragma once
///@file

#include "util.hh"
#include "processes.hh"
#include "args.hh"
#include "args/root.hh"
#include "common-args.hh"

@@ -11,6 +11,7 @@
#include "nar-accessor.hh"
#include "thread-pool.hh"
#include "callback.hh"
#include "signals.hh"

#include <chrono>
#include <future>

src/libstore/build/child.cc (new file, 37 lines)

@@ -0,0 +1,37 @@
#include "child.hh"
#include "current-process.hh"
#include "logging.hh"

#include <fcntl.h>
#include <unistd.h>

namespace nix {

void commonChildInit()
{
    logger = makeSimpleLogger();

    const static std::string pathNullDevice = "/dev/null";
    restoreProcessContext(false);

    /* Put the child in a separate session (and thus a separate
       process group) so that it has no controlling terminal (meaning
       that e.g. ssh cannot open /dev/tty) and it doesn't receive
       terminal signals. */
    if (setsid() == -1)
        throw SysError("creating a new session");

    /* Dup stderr to stdout. */
    if (dup2(STDERR_FILENO, STDOUT_FILENO) == -1)
        throw SysError("cannot dup stderr into stdout");

    /* Reroute stdin to /dev/null. */
    int fdDevNull = open(pathNullDevice.c_str(), O_RDWR);
    if (fdDevNull == -1)
        throw SysError("cannot open '%1%'", pathNullDevice);
    if (dup2(fdDevNull, STDIN_FILENO) == -1)
        throw SysError("cannot dup null device into stdin");
    close(fdDevNull);
}

}

src/libstore/build/child.hh (new file, 11 lines)

@@ -0,0 +1,11 @@
#pragma once
///@file

namespace nix {

/**
 * Common initialisation performed in child processes.
 */
void commonChildInit();

}

@@ -1,5 +1,7 @@
#include "globals.hh"
#include "hook-instance.hh"
#include "file-system.hh"
#include "child.hh"

namespace nix {

@@ -3,6 +3,7 @@

#include "logging.hh"
#include "serialise.hh"
#include "processes.hh"

namespace nix {

@@ -15,7 +15,10 @@
#include "json-utils.hh"
#include "cgroup.hh"
#include "personality.hh"
#include "current-process.hh"
#include "namespaces.hh"
#include "child.hh"
#include "unix-domain-socket.hh"

#include <regex>
#include <queue>

@@ -1620,6 +1623,8 @@ void setupSeccomp()
        seccomp_release(ctx);
    });

    constexpr std::string_view nativeSystem = SYSTEM;

    if (nativeSystem == "x86_64-linux" &&
        seccomp_arch_add(ctx, SCMP_ARCH_X86) != 0)
        throw SysError("unable to add 32-bit seccomp architecture");

@@ -3,6 +3,7 @@

#include "derivation-goal.hh"
#include "local-store.hh"
#include "processes.hh"

namespace nix {

@@ -4,6 +4,7 @@
#include "drv-output-substitution-goal.hh"
#include "local-derivation-goal.hh"
#include "hook-instance.hh"
#include "signals.hh"

#include <poll.h>

@@ -1,5 +1,4 @@
#include "serialise.hh"
#include "util.hh"
#include "path-with-outputs.hh"
#include "store-api.hh"
#include "build-result.hh"

@@ -1,4 +1,5 @@
#include "crypto.hh"
#include "file-system.hh"
#include "util.hh"
#include "globals.hh"

@@ -352,7 +352,7 @@ Derivation parseDerivation(
        expect(str, "erive(");
        version = DerivationATermVersion::Traditional;
        break;
-   case 'r':
+   case 'r': {
        expect(str, "rvWithVersion(");
        auto versionS = parseString(str);
        if (versionS == "xp-dyn-drv") {

@@ -365,6 +365,9 @@ Derivation parseDerivation(
        expect(str, ",");
        break;
    }
    default:
        throw Error("derivation does not start with 'Derive' or 'DrvWithVersion'");
    }

    /* Parse the list of outputs. */
    expect(str, "[");

@@ -1,4 +1,5 @@
#include "derived-path-map.hh"
#include "util.hh"

namespace nix {

@@ -1,4 +1,5 @@
#pragma once
///@file

#include "types.hh"
#include "derived-path.hh"

@@ -1,10 +1,10 @@
#pragma once
///@file

#include "util.hh"
#include "path.hh"
#include "outputs-spec.hh"
#include "comparator.hh"
#include "config.hh"

#include <variant>

@@ -1,11 +1,12 @@
#include "filetransfer.hh"
#include "util.hh"
#include "namespaces.hh"
#include "globals.hh"
#include "store-api.hh"
#include "s3.hh"
#include "compression.hh"
#include "finally.hh"
#include "callback.hh"
#include "signals.hh"

#if ENABLE_S3
#include <aws/core/client/ClientConfiguration.h>

@@ -2,6 +2,13 @@
#include "globals.hh"
#include "local-store.hh"
#include "finally.hh"
#include "unix-domain-socket.hh"
#include "signals.hh"

#if !defined(__linux__)
// For shelling out to lsof
# include "processes.hh"
#endif

#include <functional>
#include <queue>

@@ -1,7 +1,8 @@
#include "globals.hh"
#include "util.hh"
#include "current-process.hh"
#include "archive.hh"
#include "args.hh"
#include "users.hh"
#include "abstract-setting-to-json.hh"
#include "compute-levels.hh"

@@ -17,9 +18,13 @@
#include <sodium/core.h>

#ifdef __GLIBC__
-#include <gnu/lib-names.h>
-#include <nss.h>
-#include <dlfcn.h>
+# include <gnu/lib-names.h>
+# include <nss.h>
+# include <dlfcn.h>
#endif

#if __APPLE__
# include "processes.hh"
#endif

#include "config-impl.hh"

@@ -3,7 +3,7 @@

#include "types.hh"
#include "config.hh"
#include "util.hh"
#include "environment-variables.hh"
#include "experimental-features.hh"

#include <map>

@@ -10,6 +10,7 @@
#include "topo-sort.hh"
#include "finally.hh"
#include "compression.hh"
#include "signals.hh"

#include <iostream>
#include <algorithm>

@@ -7,7 +7,6 @@
#include "store-api.hh"
#include "indirect-root-store.hh"
#include "sync.hh"
#include "util.hh"

#include <chrono>
#include <future>

@@ -1,4 +1,5 @@
#include "lock.hh"
#include "file-system.hh"
#include "globals.hh"
#include "pathlocks.hh"

@@ -1,5 +1,4 @@
#include "machines.hh"
#include "util.hh"
#include "globals.hh"
#include "store-api.hh"

@@ -60,12 +60,22 @@ struct NarAccessor : public SourceAccessor

    void createDirectory(const Path & path) override
    {
-       createMember(path, {Type::tDirectory, false, 0, 0});
+       createMember(path, NarMember{ .stat = {
+           .type = Type::tDirectory,
+           .fileSize = 0,
+           .isExecutable = false,
+           .narOffset = 0
+       } });
    }

    void createRegularFile(const Path & path) override
    {
-       createMember(path, {Type::tRegular, false, 0, 0});
+       createMember(path, NarMember{ .stat = {
+           .type = Type::tRegular,
+           .fileSize = 0,
+           .isExecutable = false,
+           .narOffset = 0
+       } });
    }

    void closeRegularFile() override

@@ -78,9 +88,8 @@ struct NarAccessor : public SourceAccessor

    void preallocateContents(uint64_t size) override
    {
-       assert(size <= std::numeric_limits<uint64_t>::max());
        auto & st = parents.top()->stat;
-       st.fileSize = (uint64_t) size;
+       st.fileSize = size;
        st.narOffset = pos;
    }

@@ -128,9 +137,8 @@ struct NarAccessor : public SourceAccessor

        if (type == "directory") {
            member.stat = {.type = Type::tDirectory};
-           for (auto i = v["entries"].begin(); i != v["entries"].end(); ++i) {
-               std::string name = i.key();
-               recurse(member.children[name], i.value());
+           for (const auto &[name, function] : v["entries"].items()) {
+               recurse(member.children[name], function);
            }
        } else if (type == "regular") {
            member.stat = {

@@ -153,7 +161,7 @@ struct NarAccessor : public SourceAccessor
    {
        NarMember * current = &root;

-       for (auto & i : path) {
+       for (const auto & i : path) {
            if (current->stat.type != Type::tDirectory) return nullptr;
            auto child = current->children.find(std::string(i));
            if (child == current->children.end()) return nullptr;

@@ -186,7 +194,7 @@ struct NarAccessor : public SourceAccessor
            throw Error("path '%1%' inside NAR file is not a directory", path);

        DirEntries res;
-       for (auto & child : i.children)
+       for (const auto & child : i.children)
            res.insert_or_assign(child.first, std::nullopt);

        return res;

@@ -251,7 +259,7 @@ json listNar(ref<SourceAccessor> accessor, const CanonPath & path, bool recurse)
    {
        obj["entries"] = json::object();
        json &res2 = obj["entries"];
-       for (auto & [name, type] : accessor->readDirectory(path)) {
+       for (const auto & [name, type] : accessor->readDirectory(path)) {
            if (recurse) {
                res2[name] = listNar(accessor, path + name, true);
            } else
@ -25,7 +25,7 @@ ref<SourceAccessor> makeNarAccessor(Source & source);
|
|||
* readFile() method of the accessor to get the contents of files
|
||||
* inside the NAR.
|
||||
*/
|
||||
typedef std::function<std::string(uint64_t, uint64_t)> GetNarBytes;
|
||||
using GetNarBytes = std::function<std::string(uint64_t, uint64_t)>;
|
||||
|
||||
ref<SourceAccessor> makeLazyNarAccessor(
|
||||
const std::string & listing,
|
||||
|
|
|

@@ -1,4 +1,5 @@
#include "nar-info-disk-cache.hh"
#include "users.hh"
#include "sync.hh"
#include "sqlite.hh"
#include "globals.hh"

@@ -4,6 +4,15 @@

namespace nix {

GENERATE_CMP_EXT(
    ,
    NarInfo,
    me->url,
    me->compression,
    me->fileHash,
    me->fileSize,
    static_cast<const ValidPathInfo &>(*me));

NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
    : ValidPathInfo(StorePath(StorePath::dummy), Hash(Hash::dummy)) // FIXME: hack
{

@@ -29,12 +38,12 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
    while (pos < s.size()) {

        size_t colon = s.find(':', pos);
-       if (colon == std::string::npos) throw corrupt("expecting ':'");
+       if (colon == s.npos) throw corrupt("expecting ':'");

        std::string name(s, pos, colon - pos);

        size_t eol = s.find('\n', colon + 2);
-       if (eol == std::string::npos) throw corrupt("expecting '\\n'");
+       if (eol == s.npos) throw corrupt("expecting '\\n'");

        std::string value(s, colon + 2, eol - colon - 2);

@@ -125,4 +134,59 @@ std::string NarInfo::to_string(const Store & store) const
    return res;
}

nlohmann::json NarInfo::toJSON(
    const Store & store,
    bool includeImpureInfo,
    HashFormat hashFormat) const
{
    using nlohmann::json;

    auto jsonObject = ValidPathInfo::toJSON(store, includeImpureInfo, hashFormat);

    if (includeImpureInfo) {
        if (!url.empty())
            jsonObject["url"] = url;
        if (!compression.empty())
            jsonObject["compression"] = compression;
        if (fileHash)
            jsonObject["downloadHash"] = fileHash->to_string(hashFormat, true);
        if (fileSize)
            jsonObject["downloadSize"] = fileSize;
    }

    return jsonObject;
}

NarInfo NarInfo::fromJSON(
    const Store & store,
    const StorePath & path,
    const nlohmann::json & json)
{
    using nlohmann::detail::value_t;

    NarInfo res {
        ValidPathInfo {
            path,
            UnkeyedValidPathInfo::fromJSON(store, json),
        }
    };

    if (json.contains("url"))
        res.url = ensureType(valueAt(json, "url"), value_t::string);

    if (json.contains("compression"))
        res.compression = ensureType(valueAt(json, "compression"), value_t::string);

    if (json.contains("downloadHash"))
        res.fileHash = Hash::parseAny(
            static_cast<const std::string &>(
                ensureType(valueAt(json, "downloadHash"), value_t::string)),
            std::nullopt);

    if (json.contains("downloadSize"))
        res.fileSize = ensureType(valueAt(json, "downloadSize"), value_t::number_integer);

    return res;
}

}

@@ -17,14 +17,25 @@ struct NarInfo : ValidPathInfo
    uint64_t fileSize = 0;

    NarInfo() = delete;
-   NarInfo(const Store & store, std::string && name, ContentAddressWithReferences && ca, Hash narHash)
+   NarInfo(const Store & store, std::string name, ContentAddressWithReferences ca, Hash narHash)
        : ValidPathInfo(store, std::move(name), std::move(ca), narHash)
    { }
-   NarInfo(StorePath && path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { }
+   NarInfo(StorePath path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { }
    NarInfo(const ValidPathInfo & info) : ValidPathInfo(info) { }
    NarInfo(const Store & store, const std::string & s, const std::string & whence);

    DECLARE_CMP(NarInfo);

    std::string to_string(const Store & store) const;

    nlohmann::json toJSON(
        const Store & store,
        bool includeImpureInfo,
        HashFormat hashFormat) const override;
    static NarInfo fromJSON(
        const Store & store,
        const StorePath & path,
        const nlohmann::json & json);
};

}

@@ -1,6 +1,6 @@
#include "util.hh"
#include "local-store.hh"
#include "globals.hh"
#include "signals.hh"

#include <cstdlib>
#include <cstring>

@@ -132,6 +132,41 @@ bool ParsedDerivation::useUidRange() const

static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*");

/**
 * Write a JSON representation of store object metadata, such as the
 * hash and the references.
 */
static nlohmann::json pathInfoToJSON(
    Store & store,
    const StorePathSet & storePaths)
{
    nlohmann::json::array_t jsonList = nlohmann::json::array();

    for (auto & storePath : storePaths) {
        auto info = store.queryPathInfo(storePath);

        auto & jsonPath = jsonList.emplace_back(
            info->toJSON(store, false, HashFormat::Base32));

        // Add the path to the object whose metadata we are including.
        jsonPath["path"] = store.printStorePath(storePath);

        jsonPath["valid"] = true;

        jsonPath["closureSize"] = ({
            uint64_t totalNarSize = 0;
            StorePathSet closure;
            store.computeFSClosure(info->path, closure, false, false);
            for (auto & p : closure) {
                auto info = store.queryPathInfo(p);
                totalNarSize += info->narSize;
            }
            totalNarSize;
        });
    }
    return jsonList;
}

std::optional<nlohmann::json> ParsedDerivation::prepareStructuredAttrs(Store & store, const StorePathSet & inputPaths)
{
    auto structuredAttrs = getStructuredAttrs();
@ -152,8 +187,8 @@ std::optional<nlohmann::json> ParsedDerivation::prepareStructuredAttrs(Store & s
|
|||
StorePathSet storePaths;
|
||||
for (auto & p : *i)
|
||||
storePaths.insert(store.parseStorePath(p.get<std::string>()));
|
||||
json[i.key()] = store.pathInfoToJSON(
|
||||
store.exportReferences(storePaths, inputPaths), false, true);
|
||||
json[i.key()] = pathInfoToJSON(store,
|
||||
store.exportReferences(storePaths, inputPaths));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|

@@ -1,5 +1,8 @@
#include <nlohmann/json.hpp>

#include "path-info.hh"
#include "store-api.hh"
#include "json-utils.hh"

namespace nix {

@@ -144,4 +147,94 @@ ValidPathInfo::ValidPathInfo(
    }, std::move(ca).raw);
}

nlohmann::json UnkeyedValidPathInfo::toJSON(
    const Store & store,
    bool includeImpureInfo,
    HashFormat hashFormat) const
{
    using nlohmann::json;

    auto jsonObject = json::object();

    jsonObject["narHash"] = narHash.to_string(hashFormat, true);
    jsonObject["narSize"] = narSize;

    {
        auto & jsonRefs = (jsonObject["references"] = json::array());
        for (auto & ref : references)
            jsonRefs.emplace_back(store.printStorePath(ref));
    }

    if (ca)
        jsonObject["ca"] = renderContentAddress(ca);

    if (includeImpureInfo) {
        if (deriver)
            jsonObject["deriver"] = store.printStorePath(*deriver);

        if (registrationTime)
            jsonObject["registrationTime"] = registrationTime;

        if (ultimate)
            jsonObject["ultimate"] = ultimate;

        if (!sigs.empty()) {
            for (auto & sig : sigs)
                jsonObject["signatures"].push_back(sig);
        }
    }

    return jsonObject;
}

UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(
    const Store & store,
    const nlohmann::json & json)
{
    using nlohmann::detail::value_t;

    UnkeyedValidPathInfo res {
        Hash(Hash::dummy),
    };

    ensureType(json, value_t::object);
    res.narHash = Hash::parseAny(
        static_cast<const std::string &>(
            ensureType(valueAt(json, "narHash"), value_t::string)),
        std::nullopt);
    res.narSize = ensureType(valueAt(json, "narSize"), value_t::number_integer);

    try {
        auto & references = ensureType(valueAt(json, "references"), value_t::array);
        for (auto & input : references)
            res.references.insert(store.parseStorePath(static_cast<const std::string &>(input)));
    } catch (Error & e) {
        e.addTrace({}, "while reading key 'references'");
        throw;
    }

    if (json.contains("ca"))
        res.ca = ContentAddress::parse(
            static_cast<const std::string &>(
                ensureType(valueAt(json, "ca"), value_t::string)));

    if (json.contains("deriver"))
        res.deriver = store.parseStorePath(
            static_cast<const std::string &>(
                ensureType(valueAt(json, "deriver"), value_t::string)));

    if (json.contains("registrationTime"))
        res.registrationTime = ensureType(valueAt(json, "registrationTime"), value_t::number_integer);

    if (json.contains("ultimate"))
        res.ultimate = ensureType(valueAt(json, "ultimate"), value_t::boolean);

    if (json.contains("signatures"))
        res.sigs = valueAt(json, "signatures");

    return res;
}

}
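
For orientation, this is roughly the object shape `toJSON` emits, sketched directly with nlohmann::json; the field values are placeholders borrowed from the `makeNarInfo` test fixture later in this diff:

```cpp
// Sketch of the JSON produced by UnkeyedValidPathInfo::toJSON with
// includeImpureInfo = true; values are illustrative placeholders.
#include <nlohmann/json.hpp>
#include <iostream>

int main()
{
    nlohmann::json info = nlohmann::json::object();

    // Always present:
    info["narHash"] = "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=";
    info["narSize"] = 34878;
    info["references"] = nlohmann::json::array();

    // Only with includeImpureInfo:
    info["registrationTime"] = 23423;
    info["ultimate"] = true;
    info["signatures"] = {"asdf", "qwer"};

    std::cout << info.dump(2) << std::endl;
}
```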

@@ -78,6 +78,18 @@ struct UnkeyedValidPathInfo
    DECLARE_CMP(UnkeyedValidPathInfo);

    virtual ~UnkeyedValidPathInfo() { }

    /**
     * @param includeImpureInfo If true, variable elements such as the
     * registration time are included.
     */
    virtual nlohmann::json toJSON(
        const Store & store,
        bool includeImpureInfo,
        HashFormat hashFormat) const;
    static UnkeyedValidPathInfo fromJSON(
        const Store & store,
        const nlohmann::json & json);
};

struct ValidPathInfo : UnkeyedValidPathInfo {
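
`toJSON` is declared `virtual` so subclasses can extend the serialized object (the `NarInfo` override near the top of this diff adds fields such as `url` and `downloadHash`), while `fromJSON` remains a plain static factory. A minimal stand-alone analogue of that override pattern:

```cpp
// Virtual serialization: the base emits common fields, the subclass
// appends its own, and callers dispatch through a base reference.
#include <iostream>
#include <string>

struct PathInfo {
    virtual ~PathInfo() = default;
    virtual std::string toJSON() const { return "{\"narSize\":34878}"; }
};

struct NarishInfo : PathInfo {   // hypothetical stand-in for NarInfo
    std::string toJSON() const override {
        return "{\"narSize\":34878,\"url\":\"nar/...\"}";
    }
};

int main()
{
    NarishInfo n;
    const PathInfo & p = n;
    std::cout << p.toJSON() << "\n"; // dispatches to the override
}
```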

@@ -1,6 +1,5 @@
#include "path-references.hh"
#include "hash.hh"
#include "util.hh"
#include "archive.hh"

#include <map>

@@ -1,4 +1,5 @@
#pragma once
///@file

#include "references.hh"
#include "path.hh"

@@ -1,6 +1,7 @@
#include "pathlocks.hh"
#include "util.hh"
#include "sync.hh"
#include "signals.hh"

#include <cerrno>
#include <cstdlib>

@@ -1,7 +1,7 @@
#pragma once
///@file

#include "util.hh"
#include "file-descriptor.hh"

namespace nix {

@@ -1,7 +1,7 @@
#include "profiles.hh"
#include "store-api.hh"
#include "local-fs-store.hh"
#include "util.hh"
#include "users.hh"

#include <sys/types.h>
#include <sys/stat.h>

@@ -1,3 +1,6 @@
#pragma once
///@file

#include "remote-store.hh"
#include "worker-protocol.hh"
#include "pool.hh"

@@ -1,5 +1,4 @@
#include "serialise.hh"
#include "util.hh"
#include "path-with-outputs.hh"
#include "store-api.hh"
#include "build-result.hh"

@@ -2,6 +2,7 @@
#include "globals.hh"
#include "util.hh"
#include "url.hh"
#include "signals.hh"

#include <sqlite3.h>

@@ -1,5 +1,8 @@
#include "ssh.hh"
#include "finally.hh"
#include "current-process.hh"
#include "environment-variables.hh"
#include "util.hh"

namespace nix {

@@ -111,9 +114,11 @@ std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string
            reply = readLine(out.readSide.get());
        } catch (EndOfFile & e) { }

        if (reply != "started")
        if (reply != "started") {
            printTalkative("SSH stdout first line: %s", reply);
            throw Error("failed to start SSH connection to '%s'", host);
        }
    }

    conn->out = std::move(out.readSide);
    conn->in = std::move(in.writeSide);

@@ -129,7 +134,6 @@ Path SSHMaster::startMaster()

    if (state->sshMaster != -1) return state->socketPath;


    state->socketPath = (Path) *state->tmpDir + "/ssh.sock";

    Pipe out;

@@ -141,7 +145,8 @@ Path SSHMaster::startMaster()
    logger->pause();
    Finally cleanup = [&]() { logger->resume(); };

    bool wasMasterRunning = isMasterRunning();
    if (isMasterRunning())
        return state->socketPath;

    state->sshMaster = startProcess([&]() {
        restoreProcessContext();

@@ -162,13 +167,13 @@ Path SSHMaster::startMaster()

    out.writeSide = -1;

    if (!wasMasterRunning) {
        std::string reply;
        try {
            reply = readLine(out.readSide.get());
        } catch (EndOfFile & e) { }

        if (reply != "started")
        if (reply != "started") {
            printTalkative("SSH master stdout first line: %s", reply);
            throw Error("failed to start SSH master connection to '%s'", host);
        }
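
Both SSH hunks make the same fix: when the handshake line is not `started`, the unexpected first line is now logged at talkative verbosity before the error is thrown, which helps diagnose cases such as a login banner corrupting the protocol. A stand-alone analogue of the pattern (names hypothetical):

```cpp
// Log the surprising input before failing, instead of failing opaquely.
#include <iostream>
#include <stdexcept>
#include <string>

static void expectStarted(const std::string & reply)
{
    if (reply != "started") {
        std::cerr << "SSH stdout first line: " << reply << "\n";
        throw std::runtime_error("failed to start SSH connection");
    }
}

int main()
{
    try {
        expectStarted("Welcome to example.org!"); // e.g. a login banner
    } catch (const std::exception & e) {
        std::cerr << e.what() << "\n";
        return 1;
    }
}
```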

@@ -1,8 +1,9 @@
#pragma once
///@file

#include "util.hh"
#include "sync.hh"
#include "processes.hh"
#include "file-system.hh"

namespace nix {

@@ -14,6 +14,8 @@
// FIXME this should not be here, see TODO below on
// `addMultipleToStore`.
#include "worker-protocol.hh"
#include "signals.hh"
#include "users.hh"

#include <nlohmann/json.hpp>
#include <regex>

@@ -819,7 +821,7 @@ void Store::substitutePaths(const StorePathSet & paths)
    std::vector<DerivedPath> paths2;
    for (auto & path : paths)
        if (!path.isDerivation())
            paths2.push_back(DerivedPath::Opaque{path});
            paths2.emplace_back(DerivedPath::Opaque{path});
    uint64_t downloadSize, narSize;
    StorePathSet willBuild, willSubstitute, unknown;
    queryMissing(paths2,
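
The `push_back`-to-`emplace_back` switch is a small cleanup: `emplace_back` constructs the vector element directly from the `DerivedPath::Opaque` temporary, whereas `push_back` first materialises a `DerivedPath` temporary and then moves it into place. A toy analogue, assuming a simplified variant-like setup:

```cpp
// emplace_back forwards the argument to the element's constructor;
// push_back converts to the element type first, then moves.
#include <string>
#include <variant>
#include <vector>

struct Opaque { std::string path; };
using Derived = std::variant<Opaque, int>;

int main()
{
    std::vector<Derived> v;
    v.push_back(Opaque{"a"});    // Opaque -> Derived temporary -> move
    v.emplace_back(Opaque{"b"}); // Derived constructed in place from Opaque
}
```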

@@ -949,96 +951,6 @@ StorePathSet Store::exportReferences(const StorePathSet & storePaths, const Stor
    return paths;
}

json Store::pathInfoToJSON(const StorePathSet & storePaths,
    bool includeImpureInfo, bool showClosureSize,
    HashFormat hashFormat,
    AllowInvalidFlag allowInvalid)
{
    json::array_t jsonList = json::array();

    for (auto & storePath : storePaths) {
        auto & jsonPath = jsonList.emplace_back(json::object());

        try {
            auto info = queryPathInfo(storePath);

            jsonPath["path"] = printStorePath(info->path);
            jsonPath["valid"] = true;
            jsonPath["narHash"] = info->narHash.to_string(hashFormat, true);
            jsonPath["narSize"] = info->narSize;

            {
                auto & jsonRefs = (jsonPath["references"] = json::array());
                for (auto & ref : info->references)
                    jsonRefs.emplace_back(printStorePath(ref));
            }

            if (info->ca)
                jsonPath["ca"] = renderContentAddress(info->ca);

            std::pair<uint64_t, uint64_t> closureSizes;

            if (showClosureSize) {
                closureSizes = getClosureSize(info->path);
                jsonPath["closureSize"] = closureSizes.first;
            }

            if (includeImpureInfo) {

                if (info->deriver)
                    jsonPath["deriver"] = printStorePath(*info->deriver);

                if (info->registrationTime)
                    jsonPath["registrationTime"] = info->registrationTime;

                if (info->ultimate)
                    jsonPath["ultimate"] = info->ultimate;

                if (!info->sigs.empty()) {
                    for (auto & sig : info->sigs)
                        jsonPath["signatures"].push_back(sig);
                }

                auto narInfo = std::dynamic_pointer_cast<const NarInfo>(
                    std::shared_ptr<const ValidPathInfo>(info));

                if (narInfo) {
                    if (!narInfo->url.empty())
                        jsonPath["url"] = narInfo->url;
                    if (narInfo->fileHash)
                        jsonPath["downloadHash"] = narInfo->fileHash->to_string(hashFormat, true);
                    if (narInfo->fileSize)
                        jsonPath["downloadSize"] = narInfo->fileSize;
                    if (showClosureSize)
                        jsonPath["closureDownloadSize"] = closureSizes.second;
                }
            }

        } catch (InvalidPath &) {
            jsonPath["path"] = printStorePath(storePath);
            jsonPath["valid"] = false;
        }
    }
    return jsonList;
}

std::pair<uint64_t, uint64_t> Store::getClosureSize(const StorePath & storePath)
{
    uint64_t totalNarSize = 0, totalDownloadSize = 0;
    StorePathSet closure;
    computeFSClosure(storePath, closure, false, false);
    for (auto & p : closure) {
        auto info = queryPathInfo(p);
        totalNarSize += info->narSize;
        auto narInfo = std::dynamic_pointer_cast<const NarInfo>(
            std::shared_ptr<const ValidPathInfo>(info));
        if (narInfo)
            totalDownloadSize += narInfo->fileSize;
    }
    return {totalNarSize, totalDownloadSize};
}

const Store::Stats & Store::getStats()
{

@@ -80,7 +80,6 @@ typedef std::map<std::string, StorePath> OutputPathMap;

enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true };
enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true };
enum AllowInvalidFlag : bool { DisallowInvalid = false, AllowInvalid = true };

/**
 * Magic header of exportPath() output (obsolete).

@@ -665,28 +664,6 @@ public:
    std::string makeValidityRegistration(const StorePathSet & paths,
        bool showDerivers, bool showHash);

    /**
     * Write a JSON representation of store path metadata, such as the
     * hash and the references.
     *
     * @param includeImpureInfo If true, variable elements such as the
     * registration time are included.
     *
     * @param showClosureSize If true, the closure size of each path is
     * included.
     */
    nlohmann::json pathInfoToJSON(const StorePathSet & storePaths,
        bool includeImpureInfo, bool showClosureSize,
        HashFormat hashFormat = HashFormat::Base32,
        AllowInvalidFlag allowInvalid = DisallowInvalid);

    /**
     * @return the size of the closure of the specified path, that is,
     * the sum of the size of the NAR serialisation of each path in the
     * closure.
     */
    std::pair<uint64_t, uint64_t> getClosureSize(const StorePath & storePath);

    /**
     * Optimise the disk space usage of the Nix store by hard-linking files
     * with the same contents.

@@ -1,28 +0,0 @@
#pragma once
///@file

namespace nix {

/**
 * The path to the `unit-test-data` directory. See the contributing
 * guide in the manual for further details.
 */
static Path getUnitTestData() {
    return getEnv("_NIX_TEST_UNIT_DATA").value();
}

/**
 * Whether we should update "golden masters" instead of running tests
 * against them. See the contributing guide in the manual for further
 * details.
 */
static bool testAccept() {
    return getEnv("_NIX_TEST_ACCEPT") == "1";
}

constexpr std::string_view cannotReadGoldenMaster =
    "Cannot read golden master because another test is also updating it";

constexpr std::string_view updatingGoldenMaster =
    "Updating golden master";

}
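
This deleted header defined the golden-master helpers; it appears to have been relocated rather than dropped, since the test files below still include `tests/characterization.hh`. The workflow it gates rests on two environment variables, sketched here outside any test framework:

```cpp
// _NIX_TEST_UNIT_DATA points at the checked-in fixtures;
// _NIX_TEST_ACCEPT=1 flips tests from "compare" to "regenerate" mode.
#include <cstdlib>
#include <iostream>
#include <string>

int main()
{
    const char * data = std::getenv("_NIX_TEST_UNIT_DATA");
    const char * accept = std::getenv("_NIX_TEST_ACCEPT");

    if (!data) {
        std::cerr << "fixture directory not set\n";
        return 1;
    }
    if (accept && std::string(accept) == "1")
        std::cout << "would regenerate golden masters under " << data << "\n";
    else
        std::cout << "would compare output against " << data << "\n";
}
```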

@@ -20,16 +20,9 @@ public:
     * Golden test for `T` reading
     */
    template<typename T>
    void readTest(PathView testStem, T value)
    void readProtoTest(PathView testStem, const T & expected)
    {
        if (testAccept())
        {
            GTEST_SKIP() << cannotReadGoldenMaster;
        }
        else
        {
            auto encoded = readFile(goldenMaster(testStem));

        CharacterizationTest::readTest(testStem, [&](const auto & encoded) {
            T got = ({
                StringSource from { encoded };
                CommonProto::Serialise<T>::read(

@@ -37,44 +30,33 @@ public:
                    CommonProto::ReadConn { .from = from });
            });

            ASSERT_EQ(got, value);
        }
            ASSERT_EQ(got, expected);
        });
    }

    /**
     * Golden test for `T` write
     */
    template<typename T>
    void writeTest(PathView testStem, const T & value)
    void writeProtoTest(PathView testStem, const T & decoded)
    {
        auto file = goldenMaster(testStem);

        CharacterizationTest::writeTest(testStem, [&]() -> std::string {
            StringSink to;
            CommonProto::write(
            CommonProto::Serialise<T>::write(
                *store,
                CommonProto::WriteConn { .to = to },
                value);

            if (testAccept())
            {
                createDirs(dirOf(file));
                writeFile(file, to.s);
                GTEST_SKIP() << updatingGoldenMaster;
            }
            else
            {
                auto expected = readFile(file);
                ASSERT_EQ(to.s, expected);
            }
                decoded);
            return to.s;
        });
    }
};

#define CHARACTERIZATION_TEST(NAME, STEM, VALUE) \
    TEST_F(CommonProtoTest, NAME ## _read) { \
        readTest(STEM, VALUE); \
        readProtoTest(STEM, VALUE); \
    } \
    TEST_F(CommonProtoTest, NAME ## _write) { \
        writeTest(STEM, VALUE); \
        writeProtoTest(STEM, VALUE); \
    }

CHARACTERIZATION_TEST(
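
The refactor replaces hand-rolled accept-mode handling in each test with `CharacterizationTest::readTest`/`writeTest`, which take the encode and decode steps as callbacks and own the file I/O. A framework-free sketch of the write half (all names hypothetical):

```cpp
// The test supplies an encoder; the harness decides whether to compare
// against the golden file or regenerate it.
#include <cstdlib>
#include <fstream>
#include <functional>
#include <iostream>
#include <sstream>
#include <string>

static void writeTest(const std::string & goldenFile,
    std::function<std::string()> encode)
{
    std::string got = encode();
    if (std::getenv("_NIX_TEST_ACCEPT")) {
        std::ofstream(goldenFile) << got;   // update the golden master
        std::cout << "updated " << goldenFile << "\n";
    } else {
        std::ifstream in(goldenFile);
        std::stringstream buf;
        buf << in.rdbuf();
        std::cout << (buf.str() == got ? "PASS" : "FAIL") << "\n";
    }
}

int main()
{
    writeTest("int.golden", [] { return std::string("42"); });
}
```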

@@ -11,20 +11,20 @@ namespace nix {

using nlohmann::json;

class DerivationTest : public LibStoreTest
class DerivationTest : public CharacterizationTest, public LibStoreTest
{
    Path unitTestData = getUnitTestData() + "/libstore/derivation";

public:
    Path goldenMaster(std::string_view testStem) const override {
        return unitTestData + "/" + testStem;
    }

    /**
     * We set these in tests rather than the regular globals so we don't have
     * to worry about race conditions if the tests run concurrently.
     */
    ExperimentalFeatureSettings mockXpSettings;

    Path unitTestData = getUnitTestData() + "/libstore/derivation";

    Path goldenMaster(std::string_view testStem) {
        return unitTestData + "/" + testStem;
    }
};

class CaDerivationTest : public DerivationTest

@@ -73,14 +73,8 @@ TEST_F(DynDerivationTest, BadATerm_oldVersionDynDeps) {

#define TEST_JSON(FIXTURE, NAME, VAL, DRV_NAME, OUTPUT_NAME) \
    TEST_F(FIXTURE, DerivationOutput_ ## NAME ## _from_json) { \
        if (testAccept()) \
        { \
            GTEST_SKIP() << cannotReadGoldenMaster; \
        } \
        else \
        { \
            auto encoded = json::parse( \
                readFile(goldenMaster("output-" #NAME ".json"))); \
        readTest("output-" #NAME ".json", [&](const auto & encoded_) { \
            auto encoded = json::parse(encoded_); \
            DerivationOutput got = DerivationOutput::fromJSON( \
                *store, \
                DRV_NAME, \

@@ -89,28 +83,20 @@ TEST_F(DynDerivationTest, BadATerm_oldVersionDynDeps) {
                mockXpSettings); \
            DerivationOutput expected { VAL }; \
            ASSERT_EQ(got, expected); \
        } \
        }); \
    } \
    \
    TEST_F(FIXTURE, DerivationOutput_ ## NAME ## _to_json) { \
        auto file = goldenMaster("output-" #NAME ".json"); \
        \
        json got = DerivationOutput { VAL }.toJSON( \
        writeTest("output-" #NAME ".json", [&]() -> json { \
            return DerivationOutput { (VAL) }.toJSON( \
                *store, \
                DRV_NAME, \
                OUTPUT_NAME); \
        \
        if (testAccept()) \
        { \
            createDirs(dirOf(file)); \
            writeFile(file, got.dump(2) + "\n"); \
            GTEST_SKIP() << updatingGoldenMaster; \
        } \
        else \
        { \
            auto expected = json::parse(readFile(file)); \
            ASSERT_EQ(got, expected); \
        } \
                (DRV_NAME), \
                (OUTPUT_NAME)); \
        }, [](const auto & file) { \
            return json::parse(readFile(file)); \
        }, [](const auto & file, const auto & got) { \
            return writeFile(file, got.dump(2) + "\n"); \
        }); \
    }

TEST_JSON(DerivationTest, inputAddressed,

@@ -167,50 +153,30 @@ TEST_JSON(ImpureDerivationTest, impure,

#define TEST_JSON(FIXTURE, NAME, VAL) \
    TEST_F(FIXTURE, Derivation_ ## NAME ## _from_json) { \
        if (testAccept()) \
        { \
            GTEST_SKIP() << cannotReadGoldenMaster; \
        } \
        else \
        { \
            auto encoded = json::parse( \
                readFile(goldenMaster( #NAME ".json"))); \
        readTest(#NAME ".json", [&](const auto & encoded_) { \
            auto encoded = json::parse(encoded_); \
            Derivation expected { VAL }; \
            Derivation got = Derivation::fromJSON( \
                *store, \
                encoded, \
                mockXpSettings); \
            ASSERT_EQ(got, expected); \
        } \
        }); \
    } \
    \
    TEST_F(FIXTURE, Derivation_ ## NAME ## _to_json) { \
        auto file = goldenMaster( #NAME ".json"); \
        \
        json got = Derivation { VAL }.toJSON(*store); \
        \
        if (testAccept()) \
        { \
            createDirs(dirOf(file)); \
            writeFile(file, got.dump(2) + "\n"); \
            GTEST_SKIP() << updatingGoldenMaster; \
        } \
        else \
        { \
            auto expected = json::parse(readFile(file)); \
            ASSERT_EQ(got, expected); \
        } \
        writeTest(#NAME ".json", [&]() -> json { \
            return Derivation { VAL }.toJSON(*store); \
        }, [](const auto & file) { \
            return json::parse(readFile(file)); \
        }, [](const auto & file, const auto & got) { \
            return writeFile(file, got.dump(2) + "\n"); \
        }); \
    }

#define TEST_ATERM(FIXTURE, NAME, VAL, DRV_NAME) \
    TEST_F(FIXTURE, Derivation_ ## NAME ## _from_aterm) { \
        if (testAccept()) \
        { \
            GTEST_SKIP() << cannotReadGoldenMaster; \
        } \
        else \
        { \
            auto encoded = readFile(goldenMaster( #NAME ".drv")); \
        readTest(#NAME ".drv", [&](auto encoded) { \
            Derivation expected { VAL }; \
            auto got = parseDerivation( \
                *store, \

@@ -219,25 +185,13 @@ TEST_JSON(ImpureDerivationTest, impure,
                mockXpSettings); \
            ASSERT_EQ(got.toJSON(*store), expected.toJSON(*store)); \
            ASSERT_EQ(got, expected); \
        } \
        }); \
    } \
    \
    TEST_F(FIXTURE, Derivation_ ## NAME ## _to_aterm) { \
        auto file = goldenMaster( #NAME ".drv"); \
        \
        auto got = (VAL).unparse(*store, false); \
        \
        if (testAccept()) \
        { \
            createDirs(dirOf(file)); \
            writeFile(file, got); \
            GTEST_SKIP() << updatingGoldenMaster; \
        } \
        else \
        { \
            auto expected = readFile(file); \
            ASSERT_EQ(got, expected); \
        } \
        writeTest(#NAME ".drv", [&]() -> std::string { \
            return (VAL).unparse(*store, false); \
        }); \
    }

Derivation makeSimpleDrv(const Store & store) {

@@ -8,7 +8,7 @@

namespace nix {

class LibStoreTest : public ::testing::Test {
class LibStoreTest : public virtual ::testing::Test {
public:
    static void SetUpTestSuite() {
        initLibStore();
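
The `virtual` matters because fixtures such as `DerivationTest` and the new `NarInfoTest` below now inherit from both `CharacterizationTest` and `LibStoreTest`; with both bases (presumably) deriving from `::testing::Test`, virtual inheritance keeps a single shared `Test` subobject instead of an ambiguous diamond. A minimal stand-alone illustration:

```cpp
// Virtual bases collapse the diamond: C contains exactly one Base.
#include <iostream>

struct Base { int id = 7; };
struct A : virtual Base {};
struct B : virtual Base {};
struct C : A, B {};

int main()
{
    C c;
    std::cout << c.id << "\n"; // unambiguous thanks to virtual inheritance
}
```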

@@ -1,5 +1,7 @@
#include "machines.hh"
#include "globals.hh"
#include "file-system.hh"
#include "util.hh"

#include <gmock/gmock-matchers.h>

85
src/libstore/tests/nar-info.cc
Normal file

@@ -0,0 +1,85 @@
#include <nlohmann/json.hpp>
#include <gtest/gtest.h>

#include "path-info.hh"

#include "tests/characterization.hh"
#include "tests/libstore.hh"

namespace nix {

using nlohmann::json;

class NarInfoTest : public CharacterizationTest, public LibStoreTest
{
    Path unitTestData = getUnitTestData() + "/libstore/nar-info";

    Path goldenMaster(PathView testStem) const override {
        return unitTestData + "/" + testStem + ".json";
    }
};

static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) {
    NarInfo info = ValidPathInfo {
        store,
        "foo",
        FixedOutputInfo {
            .method = FileIngestionMethod::Recursive,
            .hash = hashString(HashType::htSHA256, "(...)"),

            .references = {
                .others = {
                    StorePath {
                        "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
                    },
                },
                .self = true,
            },
        },
        Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
    };
    info.narSize = 34878;
    if (includeImpureInfo) {
        info.deriver = StorePath {
            "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
        };
        info.registrationTime = 23423;
        info.ultimate = true;
        info.sigs = { "asdf", "qwer" };

        info.url = "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz";
        info.compression = "xz";
        info.fileHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=");
        info.fileSize = 4029176;
    }
    return info;
}

#define JSON_TEST(STEM, PURE) \
    TEST_F(NarInfoTest, NarInfo_ ## STEM ## _from_json) { \
        readTest(#STEM, [&](const auto & encoded_) { \
            auto encoded = json::parse(encoded_); \
            auto expected = makeNarInfo(*store, PURE); \
            NarInfo got = NarInfo::fromJSON( \
                *store, \
                expected.path, \
                encoded); \
            ASSERT_EQ(got, expected); \
        }); \
    } \
    \
    TEST_F(NarInfoTest, NarInfo_ ## STEM ## _to_json) { \
        writeTest(#STEM, [&]() -> json { \
            return makeNarInfo(*store, PURE) \
                .toJSON(*store, PURE, HashFormat::SRI); \
        }, [](const auto & file) { \
            return json::parse(readFile(file)); \
        }, [](const auto & file, const auto & got) { \
            return writeFile(file, got.dump(2) + "\n"); \
        }); \
    }

JSON_TEST(pure, false)
JSON_TEST(impure, true)

}