mirror of https://github.com/privatevoid-net/nix-super.git
synced 2024-11-21 21:46:15 +02:00
Merge remote-tracking branch 'edolstra/lazy-trees' into lazy-trees
commit c4c2fc24d7
123 changed files with 4063 additions and 1901 deletions
@@ -177,13 +177,16 @@ fi
PKG_CHECK_MODULES([OPENSSL], [libcrypto], [CXXFLAGS="$OPENSSL_CFLAGS $CXXFLAGS"])

# Checks for libarchive
# Look for libarchive.
PKG_CHECK_MODULES([LIBARCHIVE], [libarchive >= 3.1.2], [CXXFLAGS="$LIBARCHIVE_CFLAGS $CXXFLAGS"])
# Workaround until https://github.com/libarchive/libarchive/issues/1446 is fixed
if test "$shared" != yes; then
LIBARCHIVE_LIBS+=' -lz'
fi

# Look for libzip.
PKG_CHECK_MODULES([LIBZIP], [libzip])

# Look for SQLite, a required dependency.
PKG_CHECK_MODULES([SQLITE3], [sqlite3 >= 3.6.19], [CXXFLAGS="$SQLITE3_CFLAGS $CXXFLAGS"])
@@ -1,7 +1,7 @@
{ toplevel }:

with builtins;
with import ./utils.nix;
with import <nix/utils.nix>;

let
@@ -24,7 +24,7 @@ dummy-env = env -i \
NIX_STATE_DIR=/dummy \
NIX_CONFIG='cores = 0'

nix-eval = $(dummy-env) $(bindir)/nix eval --experimental-features nix-command -I nix/corepkgs=corepkgs --store dummy:// --impure --raw
nix-eval = $(dummy-env) $(bindir)/nix eval --experimental-features nix-command -I nix=doc/manual --store dummy:// --impure --raw

$(d)/%.1: $(d)/src/command-ref/%.md
@printf "Title: %s\n\n" "$$(basename $@ .1)" > $^.tmp
@@ -8,41 +8,10 @@ Most Nix commands interpret the following environment variables:

- [`NIX_PATH`]{#env-NIX_PATH}\
A colon-separated list of directories used to look up Nix
expressions enclosed in angle brackets (i.e., `<path>`). For
instance, the value

/home/eelco/Dev:/etc/nixos

will cause Nix to look for paths relative to `/home/eelco/Dev` and
`/etc/nixos`, in this order. It is also possible to match paths
against a prefix. For example, the value

nixpkgs=/home/eelco/Dev/nixpkgs-branch:/etc/nixos

will cause Nix to search for `<nixpkgs/path>` in
`/home/eelco/Dev/nixpkgs-branch/path` and `/etc/nixos/nixpkgs/path`.

If a path in the Nix search path starts with `http://` or
`https://`, it is interpreted as the URL of a tarball that will be
downloaded and unpacked to a temporary location. The tarball must
consist of a single top-level directory. For example, setting
`NIX_PATH` to

nixpkgs=https://github.com/NixOS/nixpkgs/archive/master.tar.gz

tells Nix to download and use the current contents of the
`master` branch in the `nixpkgs` repository.

The URLs of the tarballs from the official nixos.org channels (see
[the manual for `nix-channel`](nix-channel.md)) can be abbreviated
as `channel:<channel-name>`. For instance, the following two
values of `NIX_PATH` are equivalent:

nixpkgs=channel:nixos-21.05
nixpkgs=https://nixos.org/channels/nixos-21.05/nixexprs.tar.xz

The Nix search path can also be extended using the `-I` option to
many Nix commands, which takes precedence over `NIX_PATH`.
expressions enclosed in angle brackets (i.e., `<path>`),
e.g. `/home/eelco/Dev:/etc/nixos`. It can be extended using the
`-I` option. For more information about the semantics of the Nix
search path, see the documentation for `-I`.

- [`NIX_IGNORE_SYMLINK_STORE`]{#env-NIX_IGNORE_SYMLINK_STORE}\
Normally, the Nix store directory (typically `/nix/store`) is not
@@ -5,3 +5,11 @@
arguments will be ignored and the resulting derivation will have
`__impure` set to `true`, making it an impure derivation.

* You can now use flake references in the old CLI, e.g.

```
# nix-build flake:nixpkgs -A hello
# nix-build -I nixpkgs=flake:github:NixOS/nixpkgs/nixos-22.05 \
'<nixpkgs>' -A hello
# NIX_PATH=nixpkgs=flake:nixpkgs nix-build '<nixpkgs>' -A hello
```
@@ -112,6 +112,11 @@
bzip2 xz brotli editline
openssl sqlite
libarchive
(libzip.overrideDerivation (old: {
# Temporary workaround for https://github.com/NixOS/nixpkgs/pull/178755
cmakeFlags = old.cmakeFlags or [] ++ [ "-DBUILD_REGRESS=0" ];
patches = [ ./libzip-unix-time.patch ];
}))
boost
lowdown-nix
gtest
libzip-unix-time.patch (new file, 19 lines)
@@ -0,0 +1,19 @@
commit 26e8c76ca84999fa5c0e46a9fc3aa7de80be2e9c
Author: Eelco Dolstra <edolstra@gmail.com>
Date: Mon Oct 10 17:12:47 2022 +0200

Return time_t in the Unix epoch

diff --git a/lib/zip_dirent.c b/lib/zip_dirent.c
index 7fd2f7ce..5c050b4c 100644
--- a/lib/zip_dirent.c
+++ b/lib/zip_dirent.c
@@ -1018,7 +1018,7 @@ _zip_d2u_time(zip_uint16_t dtime, zip_uint16_t ddate) {
tm.tm_min = (dtime >> 5) & 63;
tm.tm_sec = (dtime << 1) & 62;

- return mktime(&tm);
+ return timegm(&tm);
}
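A note on the one-line change in the patch above: `mktime()` interprets the fields of a `struct tm` as local time, whereas `timegm()` (a non-standard extension provided by glibc, the BSDs and macOS) interprets them as UTC, so the DOS timestamp stored in a zip entry converts to the same Unix-epoch `time_t` on every machine. A minimal sketch of the difference; the concrete field values below are made up for illustration:

```
// Illustrative only: shows why the patch replaces mktime() with timegm().
// timegm() is a non-standard extension (glibc, BSDs, macOS).
#include <time.h>
#include <stdio.h>

int main()
{
    struct tm tm = {0};
    tm.tm_year = 2022 - 1900;  // years since 1900
    tm.tm_mon  = 9;            // October (months are 0-based)
    tm.tm_mday = 10;
    tm.tm_hour = 17;
    tm.tm_isdst = 0;

    struct tm local = tm, utc = tm;
    time_t asLocal = mktime(&local);  // depends on the local timezone
    time_t asUtc   = timegm(&utc);    // always relative to the Unix epoch (UTC)

    printf("mktime: %lld\ntimegm: %lld\n", (long long) asLocal, (long long) asUtc);
    return 0;
}
```

On a machine whose timezone is not UTC, the two printed values differ by the local UTC offset at that date, which is exactly the inconsistency the patch removes.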
@@ -208,8 +208,11 @@ void StorePathCommand::run(ref<Store> store, std::vector<StorePath> && storePath
run(store, *storePaths.begin());
}

Strings editorFor(const Path & file, uint32_t line)
Strings editorFor(const SourcePath & file, uint32_t line)
{
auto path = file.getPhysicalPath();
if (!path)
throw Error("cannot open '%s' in an editor because it has no physical path", file);
auto editor = getEnv("EDITOR").value_or("cat");
auto args = tokenizeString<Strings>(editor);
if (line > 0 && (

@@ -218,7 +221,7 @@ Strings editorFor(const Path & file, uint32_t line)
editor.find("vim") != std::string::npos ||
editor.find("kak") != std::string::npos))
args.push_back(fmt("+%d", line));
args.push_back(file);
args.push_back(path->abs());
return args;
}
@@ -238,7 +238,7 @@ static RegisterCommand registerCommand2(std::vector<std::string> && name)

/* Helper function to generate args that invoke $EDITOR on
filename:lineno. */
Strings editorFor(const Path & file, uint32_t line);
Strings editorFor(const SourcePath & file, uint32_t line);

struct MixProfile : virtual StoreCommand
{
@@ -8,6 +8,8 @@
#include "flake/flakeref.hh"
#include "store-api.hh"
#include "command.hh"
#include "fs-input-accessor.hh"
#include "tarball.hh"

namespace nix {
@@ -32,7 +34,68 @@ MixEvalArgs::MixEvalArgs()
addFlag({
.longName = "include",
.shortName = 'I',
.description = "Add *path* to the list of locations used to look up `<...>` file names.",
.description = R"(
Add *path* to the Nix search path. The Nix search path is
initialized from the colon-separated `NIX_PATH` environment
variable, and is used to look up Nix expressions enclosed in angle
brackets (i.e., `<nixpkgs>`). For instance, if the Nix search path
consists of the entries

```
/home/eelco/Dev
/etc/nixos
```

Nix will look for paths relative to `/home/eelco/Dev` and
`/etc/nixos`, in this order. It is also possible to match paths
against a prefix. For example, the search path

```
nixpkgs=/home/eelco/Dev/nixpkgs-branch
/etc/nixos
```

will cause Nix to search for `<nixpkgs/path>` in
`/home/eelco/Dev/nixpkgs-branch/path` and `/etc/nixos/nixpkgs/path`.

If a path in the Nix search path starts with `http://` or `https://`,
it is interpreted as the URL of a tarball that will be downloaded and
unpacked to a temporary location. The tarball must consist of a single
top-level directory. For example, setting `NIX_PATH` to

```
nixpkgs=https://github.com/NixOS/nixpkgs/archive/master.tar.gz
```

tells Nix to download and use the current contents of the `master`
branch in the `nixpkgs` repository.

The URLs of the tarballs from the official `nixos.org` channels
(see [the manual page for `nix-channel`](nix-channel.md)) can be
abbreviated as `channel:<channel-name>`. For instance, the
following two values of `NIX_PATH` are equivalent:

```
nixpkgs=channel:nixos-21.05
nixpkgs=https://nixos.org/channels/nixos-21.05/nixexprs.tar.xz
```

You can also use refer to source trees looked up in the flake
registry. For instance,

```
nixpkgs=flake:nixpkgs
```

specifies that the prefix `nixpkgs` shall refer to the source tree
downloaded from the `nixpkgs` entry in the flake registry. Similarly,

```
nixpkgs=flake:github:NixOS/nixpkgs/nixos-22.05

makes `<nixpkgs>` refer to a particular branch of the
`NixOS/nixpkgs` repository on GitHub.
```)",
.category = category,
.labels = {"path"},
.handler = {[&](std::string s) { searchPath.push_back(s); }}
@@ -79,7 +142,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
for (auto & i : autoArgs) {
auto v = state.allocValue();
if (i.second[0] == 'E')
state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), absPath(".")));
state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(absPath("."))));
else
v->mkString(((std::string_view) i.second).substr(1));
res.insert(state.symbols.create(i.first), v);
@@ -87,17 +150,29 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
return res.finish();
}

Path lookupFileArg(EvalState & state, std::string_view s)
SourcePath lookupFileArg(EvalState & state, std::string_view s)
{
if (isUri(s)) {
return state.store->toRealPath(
fetchers::downloadTarball(
state.store, resolveUri(s), "source", false).first.storePath);
} else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
if (EvalSettings::isPseudoUrl(s)) {
auto storePath = fetchers::downloadTarball(
state.store, EvalSettings::resolvePseudoUrl(s), "source", false).first;
auto accessor = makeStorePathAccessor(state.store, storePath);
state.registerAccessor(accessor);
return accessor->root();
}

else if (hasPrefix(s, "flake:")) {
auto flakeRef = parseFlakeRef(std::string(s.substr(6)), {}, true, false);
auto [accessor, _] = flakeRef.resolve(state.store).lazyFetch(state.store);
return accessor->root();
}

else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
Path p(s.substr(1, s.size() - 2));
return state.findFile(p);
} else
return absPath(std::string(s));
}

else
return state.rootPath(absPath(std::string(s)));
}

}
@@ -7,6 +7,7 @@ namespace nix {
class Store;
class EvalState;
class Bindings;
struct SourcePath;

struct MixEvalArgs : virtual Args
{

@@ -24,6 +25,6 @@ private:
std::map<std::string, std::string> autoArgs;
};

Path lookupFileArg(EvalState & state, std::string_view s);
SourcePath lookupFileArg(EvalState & state, std::string_view s);

}
@@ -275,9 +275,10 @@ void SourceExprCommand::completeInstallable(std::string_view prefix)

evalSettings.pureEval = false;
auto state = getEvalState();
Expr *e = state->parseExprFromFile(
resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file)))
);
Expr *e =
state->parseExprFromFile(
resolveExprPath(
lookupFileArg(*state, *file)));

Value root;
state->eval(e, root);

@@ -635,10 +636,10 @@ ref<eval_cache::EvalCache> openEvalCache(
EvalState & state,
std::shared_ptr<flake::LockedFlake> lockedFlake)
{
auto fingerprint = lockedFlake->getFingerprint();
auto fingerprint = lockedFlake->getFingerprint(state.store);
return make_ref<nix::eval_cache::EvalCache>(
evalSettings.useEvalCache && evalSettings.pureEval
? std::optional { std::cref(fingerprint) }
? fingerprint
: std::nullopt,
state,
[&state, lockedFlake]()

@@ -885,10 +886,11 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
if (file == "-") {
auto e = state->parseStdin();
state->eval(e, *vFile);
} else if (file)
}
else if (file)
state->evalFile(lookupFileArg(*state, *file), *vFile);
else {
auto e = state->parseExprFromString(*expr, absPath("."));
auto e = state->parseExprFromString(*expr, state->rootPath(absPath(".")));
state->eval(e, *vFile);
}
@@ -215,17 +215,15 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
out << dt.hint.str() << "\n";

// prefer direct pos, but if noPos then try the expr.
auto pos = *dt.pos
? *dt.pos
: positions[dt.expr.getPos() ? dt.expr.getPos() : noPos];
auto pos = dt.pos
? dt.pos
: (std::shared_ptr<AbstractPos>) positions[dt.expr.getPos() ? dt.expr.getPos() : noPos];

if (pos) {
printAtPos(pos, out);

auto loc = getCodeLines(pos);
if (loc.has_value()) {
out << pos;
if (auto loc = pos->getCodeLines()) {
out << "\n";
printCodeLines(out, "", pos, *loc);
printCodeLines(out, "", *pos, *loc);
out << "\n";
}
}

@@ -584,15 +582,17 @@ bool NixRepl::processLine(std::string line)
Value v;
evalString(arg, v);

const auto [file, line] = [&] () -> std::pair<std::string, uint32_t> {
const auto [path, line] = [&] () -> std::pair<SourcePath, uint32_t> {
if (v.type() == nPath || v.type() == nString) {
PathSet context;
auto filename = state->coerceToString(noPos, v, context).toOwned();
state->symbols.create(filename);
return {filename, 0};
auto path = state->coerceToPath(noPos, v, context);
return {path, 0};
} else if (v.isLambda()) {
auto pos = state->positions[v.lambda.fun->pos];
return {pos.file, pos.line};
if (auto path = std::get_if<SourcePath>(&pos.origin))
return {*path, pos.line};
else
throw Error("'%s' cannot be shown in an editor", pos);
} else {
// assume it's a derivation
return findPackageFilename(*state, v, arg);

@@ -600,7 +600,7 @@ bool NixRepl::processLine(std::string line)
}();

// Open in EDITOR
auto args = editorFor(file, line);
auto args = editorFor(path, line);
auto editor = args.front();
args.pop_front();

@@ -782,7 +782,7 @@ void NixRepl::loadFlake(const std::string & flakeRefS)
flake::LockFlags {
.updateLockFile = false,
.useRegistries = !evalSettings.pureEval,
.allowMutable = !evalSettings.pureEval,
.allowUnlocked = !evalSettings.pureEval,
}),
v);
addAttrsToScope(v);

@@ -859,7 +859,7 @@ void NixRepl::addVarToScope(const Symbol name, Value & v)

Expr * NixRepl::parseString(std::string s)
{
Expr * e = state->parseExprFromString(std::move(s), curDir, staticEnv);
Expr * e = state->parseExprFromString(std::move(s), state->rootPath(curDir), staticEnv);
return e;
}

@@ -917,7 +917,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
break;

case nPath:
str << ANSI_GREEN << v.path << ANSI_NORMAL; // !!! escaping?
str << ANSI_GREEN << v.path().to_string() << ANSI_NORMAL; // !!! escaping?
break;

case nNull:
@@ -106,7 +106,7 @@ std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::strin
}

std::pair<std::string, uint32_t> findPackageFilename(EvalState & state, Value & v, std::string what)
std::pair<SourcePath, uint32_t> findPackageFilename(EvalState & state, Value & v, std::string what)
{
Value * v2;
try {

@@ -118,21 +118,25 @@ std::pair<std::string, uint32_t> findPackageFilename(EvalState & state, Value &

// FIXME: is it possible to extract the Pos object instead of doing this
// toString + parsing?
auto pos = state.forceString(*v2);
PathSet context;
auto path = state.coerceToPath(noPos, *v2, context);

auto colon = pos.rfind(':');
if (colon == std::string::npos)
throw ParseError("cannot parse meta.position attribute '%s'", pos);
auto fn = path.path.abs();

auto fail = [fn]() {
throw ParseError("cannot parse 'meta.position' attribute '%s'", fn);
};

std::string filename(pos, 0, colon);
unsigned int lineno;
try {
lineno = std::stoi(std::string(pos, colon + 1, std::string::npos));
auto colon = fn.rfind(':');
if (colon == std::string::npos) fail();
std::string filename(fn, 0, colon);
auto lineno = std::stoi(std::string(fn, colon + 1, std::string::npos));
return {SourcePath{path.accessor, CanonPath(fn.substr(0, colon))}, lineno};
} catch (std::invalid_argument & e) {
throw ParseError("cannot parse line number '%s'", pos);
fail();
abort();
}

return { std::move(filename), lineno };
}
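The `findPackageFilename()` change above keeps the existing `meta.position` convention of a `<file>:<lineno>` string and only changes what the file half becomes (a `SourcePath` rather than a plain string). The split-at-the-last-colon parsing can be shown in isolation; `parsePosition` below is a hypothetical stand-in for illustration, not a function from the codebase:

```
// Illustrative only: mirrors the rfind(':') / std::stoi logic used by
// findPackageFilename() above, without the Nix-specific types.
#include <iostream>
#include <stdexcept>
#include <string>
#include <utility>

// Hypothetical helper: split "path/to/foo.nix:42" into {"path/to/foo.nix", 42}.
std::pair<std::string, unsigned int> parsePosition(const std::string & fn)
{
    auto colon = fn.rfind(':');
    if (colon == std::string::npos)
        throw std::invalid_argument("cannot parse position '" + fn + "'");
    auto lineno = std::stoi(fn.substr(colon + 1)); // throws on non-numeric input
    return {fn.substr(0, colon), (unsigned int) lineno};
}

int main()
{
    auto [file, line] = parsePosition("/nix/store/abc-nixpkgs/pkgs/hello/default.nix:17");
    std::cout << file << " @ line " << line << "\n";
}
```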
@@ -17,7 +17,7 @@ std::pair<Value *, PosIdx> findAlongAttrPath(
Value & vIn);

/* Heuristic to find the filename and lineno or a nix value. */
std::pair<std::string, uint32_t> findPackageFilename(EvalState & state, Value & v, std::string what);
std::pair<SourcePath, uint32_t> findPackageFilename(EvalState & state, Value & v, std::string what);

std::vector<Symbol> parseAttrPath(EvalState & state, std::string_view s);
@@ -442,8 +442,10 @@ Value & AttrCursor::forceValue()
if (v.type() == nString)
cachedValue = {root->db->setString(getKey(), v.string.s, v.string.context),
string_t{v.string.s, {}}};
else if (v.type() == nPath)
cachedValue = {root->db->setString(getKey(), v.path), string_t{v.path, {}}};
else if (v.type() == nPath) {
auto path = v.path().path;
cachedValue = {root->db->setString(getKey(), path.abs()), string_t{path.abs(), {}}};
}
else if (v.type() == nBool)
cachedValue = {root->db->setBool(getKey(), v.boolean), v.boolean};
else if (v.type() == nInt)

@@ -580,7 +582,7 @@ std::string AttrCursor::getString()
if (v.type() != nString && v.type() != nPath)
root->state.debugThrowLastTrace(TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type())));

return v.type() == nString ? v.string.s : v.path;
return v.type() == nString ? v.string.s : v.path().to_string();
}

string_t AttrCursor::getStringWithContext()

@@ -611,7 +613,7 @@ string_t AttrCursor::getStringWithContext()
if (v.type() == nString)
return {v.string.s, v.getContext(*root->state.store)};
else if (v.type() == nPath)
return {v.path, {}};
return {v.path().to_string(), {}};
else
root->state.debugThrowLastTrace(TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type())));
}

@@ -645,17 +647,17 @@ NixInt AttrCursor::getInt()
cachedValue = root->db->getAttr(getKey());
if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
if (auto i = std::get_if<int_t>(&cachedValue->second)) {
debug("using cached Integer attribute '%s'", getAttrPathStr());
debug("using cached integer attribute '%s'", getAttrPathStr());
return i->x;
} else
throw TypeError("'%s' is not an Integer", getAttrPathStr());
throw TypeError("'%s' is not an integer", getAttrPathStr());
}
}

auto & v = forceValue();

if (v.type() != nInt)
throw TypeError("'%s' is not an Integer", getAttrPathStr());
throw TypeError("'%s' is not an integer", getAttrPathStr());

return v.integer;
}
@@ -9,6 +9,7 @@
#include "filetransfer.hh"
#include "json.hh"
#include "function-trace.hh"
#include "fs-input-accessor.hh"

#include <algorithm>
#include <chrono>

@@ -120,7 +121,7 @@ void Value::print(const SymbolTable & symbols, std::ostream & str,
str << "\"";
break;
case tPath:
str << path; // !!! escaping?
str << path().to_string(); // !!! escaping?
break;
case tNull:
str << "null";

@@ -404,7 +405,8 @@ static Strings parseNixPath(const std::string & s)
}

if (*p == ':') {
if (isUri(std::string(start2, s.end()))) {
auto prefix = std::string(start2, s.end());
if (EvalSettings::isPseudoUrl(prefix) || hasPrefix(prefix, "flake:")) {
++p;
while (p != s.end() && *p != ':') ++p;
}
@@ -462,6 +464,28 @@ EvalState::EvalState(
, sOutputSpecified(symbols.create("outputSpecified"))
, repair(NoRepair)
, emptyBindings(0)
, rootFS(
makeFSInputAccessor(
CanonPath::root,
evalSettings.restrictEval || evalSettings.pureEval
? std::optional<std::set<CanonPath>>(std::set<CanonPath>())
: std::nullopt,
[](const CanonPath & path) -> RestrictedPathError {
auto modeInformation = evalSettings.pureEval
? "in pure eval mode (use '--impure' to override)"
: "in restricted mode";
throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation);
}))
, corepkgsFS(makeMemoryInputAccessor())
, internalFS(makeMemoryInputAccessor())
, derivationInternal{corepkgsFS->addFile(
CanonPath("derivation-internal.nix"),
#include "primops/derivation.nix.gen.hh"
)}
, callFlakeInternal{internalFS->addFile(
CanonPath("call-flake.nix"),
#include "flake/call-flake.nix.gen.hh"
)}
, store(store)
, buildStore(buildStore ? buildStore : store)
, debugRepl(nullptr)

@@ -479,6 +503,9 @@ EvalState::EvalState(
, baseEnv(allocEnv(128))
, staticBaseEnv{std::make_shared<StaticEnv>(false, nullptr)}
{
corepkgsFS->setPathDisplay("<nix", ">");
internalFS->setPathDisplay("«nix-internal»", "");

countCalls = getEnv("NIX_COUNT_CALLS").value_or("0") != "0";

assert(gcInitialised);

@@ -491,28 +518,15 @@ EvalState::EvalState(
for (auto & i : evalSettings.nixPath.get()) addToSearchPath(i);
}

if (evalSettings.restrictEval || evalSettings.pureEval) {
allowedPaths = PathSet();
/* Allow access to all paths in the search path. */
if (rootFS->hasAccessControl())
for (auto & i : searchPath)
resolveSearchPathElem(i, true);

for (auto & i : searchPath) {
auto r = resolveSearchPathElem(i);
if (!r.first) continue;

auto path = r.second;

if (store->isInStore(r.second)) {
try {
StorePathSet closure;
store->computeFSClosure(store->toStorePath(r.second).first, closure);
for (auto & path : closure)
allowPath(path);
} catch (InvalidPath &) {
allowPath(r.second);
}
} else
allowPath(r.second);
}
}
corepkgsFS->addFile(
CanonPath("fetchurl.nix"),
#include "fetchurl.nix.gen.hh"
);

createBaseEnv();
}

@@ -525,14 +539,12 @@ EvalState::~EvalState()

void EvalState::allowPath(const Path & path)
{
if (allowedPaths)
allowedPaths->insert(path);
rootFS->allowPath(CanonPath(path));
}

void EvalState::allowPath(const StorePath & storePath)
{
if (allowedPaths)
allowedPaths->insert(store->toRealPath(storePath));
rootFS->allowPath(CanonPath(store->toRealPath(storePath)));
}

void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & v)
@@ -543,52 +555,6 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value &
v.mkString(path, PathSet({path}));
}

Path EvalState::checkSourcePath(const Path & path_)
{
if (!allowedPaths) return path_;

auto i = resolvedPaths.find(path_);
if (i != resolvedPaths.end())
return i->second;

bool found = false;

/* First canonicalize the path without symlinks, so we make sure an
* attacker can't append ../../... to a path that would be in allowedPaths
* and thus leak symlink targets.
*/
Path abspath = canonPath(path_);

if (hasPrefix(abspath, corepkgsPrefix)) return abspath;

for (auto & i : *allowedPaths) {
if (isDirOrInDir(abspath, i)) {
found = true;
break;
}
}

if (!found) {
auto modeInformation = evalSettings.pureEval
? "in pure eval mode (use '--impure' to override)"
: "in restricted mode";
throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", abspath, modeInformation);
}

/* Resolve symlinks. */
debug(format("checking access to '%s'") % abspath);
Path path = canonPath(abspath, true);

for (auto & i : *allowedPaths) {
if (isDirOrInDir(path, i)) {
resolvedPaths[path_] = path;
return path;
}
}

throw RestrictedPathError("access to canonical path '%1%' is forbidden in restricted mode", path);
}

void EvalState::checkURI(const std::string & uri)
{

@@ -609,12 +575,12 @@ void EvalState::checkURI(const std::string & uri)
/* If the URI is a path, then check it against allowedPaths as
well. */
if (hasPrefix(uri, "/")) {
checkSourcePath(uri);
rootFS->checkAllowed(CanonPath(uri));
return;
}

if (hasPrefix(uri, "file://")) {
checkSourcePath(std::string(uri, 7));
rootFS->checkAllowed(CanonPath(uri.substr(7)));
return;
}

@@ -824,7 +790,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
? std::make_unique<DebugTraceStacker>(
*this,
DebugTrace {
.pos = error->info().errPos ? *error->info().errPos : positions[expr.getPos()],
.pos = error->info().errPos ? error->info().errPos : (std::shared_ptr<AbstractPos>) positions[expr.getPos()],
.expr = expr,
.env = env,
.hint = error->info().msg,

@@ -884,7 +850,7 @@ void EvalState::throwEvalError(const PosIdx pos, const Suggestions & suggestions
}), env, expr);
}

void EvalState::throwEvalError(const PosIdx pos, const char * s, const std::string & s2)
void EvalState::throwEvalError(const PosIdx pos, const char * s, std::string_view s2)
{
debugThrowLastTrace(EvalError({
.msg = hintfmt(s, s2),

@@ -1013,7 +979,7 @@ void EvalState::throwMissingArgumentError(const PosIdx pos, const char * s, cons

void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2) const
{
e.addTrace(std::nullopt, s, s2);
e.addTrace(nullptr, s, s2);
}

void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2) const

@@ -1025,13 +991,13 @@ static std::unique_ptr<DebugTraceStacker> makeDebugTraceStacker(
EvalState & state,
Expr & expr,
Env & env,
std::optional<ErrPos> pos,
std::shared_ptr<AbstractPos> && pos,
const char * s,
const std::string & s2)
{
return std::make_unique<DebugTraceStacker>(state,
DebugTrace {
.pos = pos,
.pos = std::move(pos),
.expr = expr,
.env = env,
.hint = hintfmt(s, s2),

@@ -1079,9 +1045,9 @@ void Value::mkStringMove(const char * s, const PathSet & context)
}

void Value::mkPath(std::string_view s)
void Value::mkPath(const SourcePath & path)
{
mkPath(makeImmutableString(s));
mkPath(&*path.accessor, makeImmutableString(path.path.abs()));
}

@@ -1137,9 +1103,9 @@ void EvalState::mkThunk_(Value & v, Expr * expr)
void EvalState::mkPos(Value & v, PosIdx p)
{
auto pos = positions[p];
if (!pos.file.empty()) {
if (auto path = std::get_if<SourcePath>(&pos.origin)) {
auto attrs = buildBindings(3);
attrs.alloc(sFile).mkString(pos.file);
attrs.alloc(sFile).mkString(encodePath(*path));
attrs.alloc(sLine).mkInt(pos.line);
attrs.alloc(sColumn).mkInt(pos.column);
v.mkAttrs(attrs);

@@ -1195,17 +1161,15 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env)
}

void EvalState::evalFile(const Path & path_, Value & v, bool mustBeTrivial)
void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial)
{
auto path = checkSourcePath(path_);

FileEvalCache::iterator i;
if ((i = fileEvalCache.find(path)) != fileEvalCache.end()) {
v = i->second;
return;
}

Path resolvedPath = resolveExprPath(path);
auto resolvedPath = resolveExprPath(path);
if ((i = fileEvalCache.find(resolvedPath)) != fileEvalCache.end()) {
v = i->second;
return;

@@ -1219,26 +1183,8 @@ void EvalState::evalFile(const Path & path_, Value & v, bool mustBeTrivial)
e = j->second;

if (!e)
e = parseExprFromFile(checkSourcePath(resolvedPath));
e = parseExprFromFile(resolvedPath);

cacheFile(path, resolvedPath, e, v, mustBeTrivial);
}

void EvalState::resetFileCache()
{
fileEvalCache.clear();
fileParseCache.clear();
}

void EvalState::cacheFile(
const Path & path,
const Path & resolvedPath,
Expr * e,
Value & v,
bool mustBeTrivial)
{
fileParseCache[resolvedPath] = e;

try {
@@ -1247,8 +1193,8 @@ void EvalState::cacheFile(
*this,
*e,
this->baseEnv,
e->getPos() ? std::optional(ErrPos(positions[e->getPos()])) : std::nullopt,
"while evaluating the file '%1%':", resolvedPath)
e->getPos() ? (std::shared_ptr<AbstractPos>) positions[e->getPos()] : nullptr,
"while evaluating the file '%1%':", resolvedPath.to_string())
: nullptr;

// Enforce that 'flake.nix' is a direct attrset, not a

@@ -1258,7 +1204,7 @@ void EvalState::cacheFile(
throw EvalError("file '%s' must be an attribute set", path);
eval(e, v);
} catch (Error & e) {
addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath);
addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath.to_string());
throw;
}

@@ -1267,6 +1213,13 @@ void EvalState::cacheFile(
}

void EvalState::resetFileCache()
{
fileEvalCache.clear();
fileParseCache.clear();
}

void EvalState::eval(Expr * e, Value & v)
{
e->eval(*this, baseEnv, v);

@@ -1518,10 +1471,13 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
state.forceValue(*vAttrs, (pos2 ? pos2 : this->pos ) );

} catch (Error & e) {
auto pos2r = state.positions[pos2];
if (pos2 && pos2r.file != state.derivationNixPath)
state.addErrorTrace(e, pos2, "while evaluating the attribute '%1%'",
showAttrPath(state, env, attrPath));
if (pos2) {
auto pos2r = state.positions[pos2];
auto origin = std::get_if<SourcePath>(&pos2r.origin);
if (!(origin && *origin == state.derivationInternal))
state.addErrorTrace(e, pos2, "while evaluating the attribute '%1%'",
showAttrPath(state, env, attrPath));
}
throw;
}

@@ -1661,7 +1617,8 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
(lambda.name
? concatStrings("'", symbols[lambda.name], "'")
: "anonymous lambda"));
addErrorTrace(e, pos, "from call site%s", "");
if (pos != noPos)
addErrorTrace(e, pos, "from call site", "");
}
throw;
}

@@ -1996,42 +1953,58 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)

Value values[es->size()];
Value * vTmpP = values;
std::shared_ptr<InputAccessor> accessor;

for (auto & [i_pos, i] : *es) {
Value & vTmp = *vTmpP++;
i->eval(state, env, vTmp);
Value * vTmp = vTmpP++;
i->eval(state, env, *vTmp);

if (vTmp->type() == nAttrs) {
auto j = vTmp->attrs->find(state.sOutPath);
if (j != vTmp->attrs->end())
vTmp = j->value;
}

/* If the first element is a path, then the result will also
be a path, we don't copy anything (yet - that's done later,
since paths are copied when they are used in a derivation),
and none of the strings are allowed to have contexts. */
if (first) {
firstType = vTmp.type();
firstType = vTmp->type();
if (vTmp->type() == nPath) {
accessor = vTmp->path().accessor;
auto part = vTmp->path().path.abs();
sSize += part.size();
s.emplace_back(std::move(part));
}
}

if (firstType == nInt) {
if (vTmp.type() == nInt) {
n += vTmp.integer;
} else if (vTmp.type() == nFloat) {
if (vTmp->type() == nInt) {
n += vTmp->integer;
} else if (vTmp->type() == nFloat) {
// Upgrade the type from int to float;
firstType = nFloat;
nf = n;
nf += vTmp.fpoint;
nf += vTmp->fpoint;
} else
state.throwEvalError(i_pos, "cannot add %1% to an integer", showType(vTmp), env, *this);
state.throwEvalError(i_pos, "cannot add %1% to an integer", showType(*vTmp), env, *this);
} else if (firstType == nFloat) {
if (vTmp.type() == nInt) {
nf += vTmp.integer;
} else if (vTmp.type() == nFloat) {
nf += vTmp.fpoint;
if (vTmp->type() == nInt) {
nf += vTmp->integer;
} else if (vTmp->type() == nFloat) {
nf += vTmp->fpoint;
} else
state.throwEvalError(i_pos, "cannot add %1% to a float", showType(vTmp), env, *this);
state.throwEvalError(i_pos, "cannot add %1% to a float", showType(*vTmp), env, *this);
} else if (firstType == nPath) {
if (!first) {
auto part = state.coerceToString(i_pos, *vTmp, context, false, false);
sSize += part->size();
s.emplace_back(std::move(part));
}
} else {
if (s.empty()) s.reserve(es->size());
/* skip canonization of first path, which would only be not
canonized in the first place if it's coming from a ./${foo} type
path */
auto part = state.coerceToString(i_pos, vTmp, context, false, firstType == nString, !first);
auto part = state.coerceToString(i_pos, *vTmp, context, false, firstType == nString);
sSize += part->size();
s.emplace_back(std::move(part));
}
|
|||
else if (firstType == nPath) {
|
||||
if (!context.empty())
|
||||
state.throwEvalError(pos, "a string that refers to a store path cannot be appended to a path", env, *this);
|
||||
v.mkPath(canonPath(str()));
|
||||
v.mkPath({ref(accessor), CanonPath(str())});
|
||||
} else
|
||||
v.mkStringMove(c_str(), context);
|
||||
}
|
||||
|
@ -2237,7 +2210,7 @@ std::optional<std::string> EvalState::tryAttrsToString(const PosIdx pos, Value &
|
|||
}
|
||||
|
||||
BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet & context,
|
||||
bool coerceMore, bool copyToStore, bool canonicalizePath)
|
||||
bool coerceMore, bool copyToStore)
|
||||
{
|
||||
forceValue(v, pos);
|
||||
|
||||
|
@ -2247,12 +2220,10 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet
|
|||
}
|
||||
|
||||
if (v.type() == nPath) {
|
||||
BackedStringView path(PathView(v.path));
|
||||
if (canonicalizePath)
|
||||
path = canonPath(*path);
|
||||
if (copyToStore)
|
||||
path = copyPathToStore(context, std::move(path).toOwned());
|
||||
return path;
|
||||
auto path = v.path();
|
||||
return copyToStore
|
||||
? store->printStorePath(copyPathToStore(context, path))
|
||||
: encodePath(path);
|
||||
}
|
||||
|
||||
if (v.type() == nAttrs) {
|
||||
|
@ -2294,36 +2265,47 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet
|
|||
}
|
||||
|
||||
|
||||
std::string EvalState::copyPathToStore(PathSet & context, const Path & path)
|
||||
StorePath EvalState::copyPathToStore(PathSet & context, const SourcePath & path)
|
||||
{
|
||||
if (nix::isDerivation(path))
|
||||
throwEvalError("file names are not allowed to end in '%1%'", drvExtension);
|
||||
if (nix::isDerivation(path.path.abs()))
|
||||
throw EvalError("file names are not allowed to end in '%s'", drvExtension);
|
||||
|
||||
Path dstPath;
|
||||
auto i = srcToStore.find(path);
|
||||
if (i != srcToStore.end())
|
||||
dstPath = store->printStorePath(i->second);
|
||||
else {
|
||||
auto p = settings.readOnlyMode
|
||||
? store->computeStorePathForPath(std::string(baseNameOf(path)), checkSourcePath(path)).first
|
||||
: store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, repair);
|
||||
dstPath = store->printStorePath(p);
|
||||
allowPath(p);
|
||||
srcToStore.insert_or_assign(path, std::move(p));
|
||||
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, dstPath);
|
||||
}
|
||||
|
||||
context.insert(dstPath);
|
||||
auto dstPath = i != srcToStore.end()
|
||||
? i->second
|
||||
: [&]() {
|
||||
auto dstPath = path.fetchToStore(store, path.baseName(), nullptr, repair);
|
||||
allowPath(dstPath);
|
||||
srcToStore.insert_or_assign(path, dstPath);
|
||||
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
|
||||
return dstPath;
|
||||
}();
|
||||
|
||||
context.insert(store->printStorePath(dstPath));
|
||||
return dstPath;
|
||||
}
|
||||
|
||||
|
||||
Path EvalState::coerceToPath(const PosIdx pos, Value & v, PathSet & context)
|
||||
SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, PathSet & context)
|
||||
{
|
||||
auto path = coerceToString(pos, v, context, false, false).toOwned();
|
||||
if (path == "" || path[0] != '/')
|
||||
throwEvalError(pos, "string '%1%' doesn't represent an absolute path", path);
|
||||
return path;
|
||||
forceValue(v, pos);
|
||||
|
||||
if (v.type() == nString) {
|
||||
copyContext(v, context);
|
||||
return decodePath(v.str(), pos);
|
||||
}
|
||||
|
||||
if (v.type() == nPath)
|
||||
return v.path();
|
||||
|
||||
if (v.type() == nAttrs) {
|
||||
auto i = v.attrs->find(sOutPath);
|
||||
if (i != v.attrs->end())
|
||||
return coerceToPath(pos, *i->value, context);
|
||||
}
|
||||
|
||||
throwTypeError(pos, "cannot coerce %1% to a path", v);
|
||||
}
|
||||
|
||||
|
||||
|
@ -2370,7 +2352,9 @@ bool EvalState::eqValues(Value & v1, Value & v2)
|
|||
return strcmp(v1.string.s, v2.string.s) == 0;
|
||||
|
||||
case nPath:
|
||||
return strcmp(v1.path, v2.path) == 0;
|
||||
return
|
||||
v1._path.accessor == v2._path.accessor
|
||||
&& strcmp(v1._path.path, v2._path.path) == 0;
|
||||
|
||||
case nNull:
|
||||
return true;
|
||||
|
@ -2508,7 +2492,8 @@ void EvalState::printStats()
|
|||
else
|
||||
obj.attr("name", nullptr);
|
||||
if (auto pos = positions[fun->pos]) {
|
||||
obj.attr("file", (std::string_view) pos.file);
|
||||
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
||||
obj.attr("file", path->to_string());
|
||||
obj.attr("line", pos.line);
|
||||
obj.attr("column", pos.column);
|
||||
}
|
||||
|
@ -2520,7 +2505,8 @@ void EvalState::printStats()
|
|||
for (auto & i : attrSelects) {
|
||||
auto obj = list.object();
|
||||
if (auto pos = positions[i.first]) {
|
||||
obj.attr("file", (const std::string &) pos.file);
|
||||
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
||||
obj.attr("file", path->to_string());
|
||||
obj.attr("line", pos.line);
|
||||
obj.attr("column", pos.column);
|
||||
}
|
||||
|
@ -2585,6 +2571,23 @@ Strings EvalSettings::getDefaultNixPath()
|
|||
return res;
|
||||
}
|
||||
|
||||
bool EvalSettings::isPseudoUrl(std::string_view s)
|
||||
{
|
||||
if (s.compare(0, 8, "channel:") == 0) return true;
|
||||
size_t pos = s.find("://");
|
||||
if (pos == std::string::npos) return false;
|
||||
std::string scheme(s, 0, pos);
|
||||
return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" || scheme == "s3" || scheme == "ssh";
|
||||
}
|
||||
|
||||
std::string EvalSettings::resolvePseudoUrl(std::string_view url)
|
||||
{
|
||||
if (hasPrefix(url, "channel:"))
|
||||
return "https://nixos.org/channels/" + std::string(url.substr(8)) + "/nixexprs.tar.xz";
|
||||
else
|
||||
return std::string(url);
|
||||
}
|
||||
|
||||
EvalSettings evalSettings;
|
||||
|
||||
static GlobalConfig::Register rEvalSettings(&evalSettings);
|
||||
|
|
|
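The two new `EvalSettings` helpers at the end of the hunk above are small enough to exercise on their own. The following sketch restates them outside the Nix tree (with `hasPrefix` replaced by a plain `compare`, since that helper is a Nix utility) and prints what they yield for a few typical `NIX_PATH` entries:

```
// Self-contained restatement of EvalSettings::isPseudoUrl() /
// resolvePseudoUrl() from the hunk above, plus a tiny driver.
#include <iostream>
#include <string>
#include <string_view>

static bool isPseudoUrl(std::string_view s)
{
    if (s.compare(0, 8, "channel:") == 0) return true;
    size_t pos = s.find("://");
    if (pos == std::string_view::npos) return false;
    std::string scheme(s.substr(0, pos));
    return scheme == "http" || scheme == "https" || scheme == "file"
        || scheme == "channel" || scheme == "git" || scheme == "s3" || scheme == "ssh";
}

static std::string resolvePseudoUrl(std::string_view url)
{
    if (url.compare(0, 8, "channel:") == 0)
        return "https://nixos.org/channels/" + std::string(url.substr(8)) + "/nixexprs.tar.xz";
    return std::string(url);
}

int main()
{
    for (std::string_view s : {
            "channel:nixos-21.05",
            "https://github.com/NixOS/nixpkgs/archive/master.tar.gz",
            "/home/eelco/Dev" })
        std::cout << s << " -> "
                  << (isPseudoUrl(s) ? resolvePseudoUrl(s) : "(not a pseudo-URL)")
                  << "\n";
}
```

Running it shows `channel:nixos-21.05` expanding to the corresponding `nixexprs.tar.xz` URL, while an ordinary filesystem path is not treated as a pseudo-URL at all.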
@@ -7,6 +7,7 @@
#include "symbol-table.hh"
#include "config.hh"
#include "experimental-features.hh"
#include "input-accessor.hh"

#include <map>
#include <optional>

@@ -19,7 +20,9 @@ namespace nix {
class Store;
class EvalState;
class StorePath;
struct SourcePath;
enum RepairFlag : bool;
struct FSInputAccessor;

typedef void (* PrimOpFun) (EvalState & state, const PosIdx pos, Value * * args, Value & v);

@@ -55,16 +58,12 @@ std::unique_ptr<ValMap> mapStaticEnvBindings(const SymbolTable & st, const Stati
void copyContext(const Value & v, PathSet & context);

/* Cache for calls to addToStore(); maps source paths to the store
paths. */
typedef std::map<Path, StorePath> SrcToStore;

std::ostream & printValue(const EvalState & state, std::ostream & str, const Value & v);
std::string printValue(const EvalState & state, const Value & v);
std::ostream & operator << (std::ostream & os, const ValueType t);

// FIXME: maybe change this to an std::variant<SourcePath, URL>.
typedef std::pair<std::string, std::string> SearchPathElem;
typedef std::list<SearchPathElem> SearchPath;

@@ -78,7 +77,7 @@ struct RegexCache;
std::shared_ptr<RegexCache> makeRegexCache();

struct DebugTrace {
std::optional<ErrPos> pos;
std::shared_ptr<AbstractPos> pos;
const Expr & expr;
const Env & env;
hintformat hint;

@@ -93,8 +92,6 @@ public:
SymbolTable symbols;
PosTable positions;

static inline std::string derivationNixPath = "//builtin/derivation.nix";

const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue,
sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls,
sFile, sLine, sColumn, sFunctor, sToString,

@@ -105,25 +102,31 @@ public:
sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath,
sPrefix,
sOutputSpecified;
Symbol sDerivationNix;

/* If set, force copying files to the Nix store even if they
already exist there. */
RepairFlag repair;

/* The allowed filesystem paths in restricted or pure evaluation
mode. */
std::optional<PathSet> allowedPaths;

Bindings emptyBindings;

const ref<FSInputAccessor> rootFS;
const ref<MemoryInputAccessor> corepkgsFS;
const ref<MemoryInputAccessor> internalFS;

const SourcePath derivationInternal;

const SourcePath callFlakeInternal;

/* A map keyed by InputAccessor::number that keeps input accessors
alive. */
std::unordered_map<size_t, ref<InputAccessor>> inputAccessors;

/* Store used to materialise .drv files. */
const ref<Store> store;

/* Store used to build stuff. */
const ref<Store> buildStore;

RootValue vCallFlake = nullptr;
RootValue vImportedDrvToDerivation = nullptr;

/* Debugger */

@@ -171,30 +174,30 @@ public:

private:
SrcToStore srcToStore;

/* Cache for calls to addToStore(); maps source paths to the store
paths. */
std::map<SourcePath, StorePath> srcToStore;

/* A cache from path names to parse trees. */
#if HAVE_BOEHMGC
typedef std::map<Path, Expr *, std::less<Path>, traceable_allocator<std::pair<const Path, Expr *>>> FileParseCache;
typedef std::map<SourcePath, Expr *, std::less<SourcePath>, traceable_allocator<std::pair<const SourcePath, Expr *>>> FileParseCache;
#else
typedef std::map<Path, Expr *> FileParseCache;
typedef std::map<SourcePath, Expr *> FileParseCache;
#endif
FileParseCache fileParseCache;

/* A cache from path names to values. */
#if HAVE_BOEHMGC
typedef std::map<Path, Value, std::less<Path>, traceable_allocator<std::pair<const Path, Value>>> FileEvalCache;
typedef std::map<SourcePath, Value, std::less<SourcePath>, traceable_allocator<std::pair<const SourcePath, Value>>> FileEvalCache;
#else
typedef std::map<Path, Value> FileEvalCache;
typedef std::map<SourcePath, Value> FileEvalCache;
#endif
FileEvalCache fileEvalCache;

SearchPath searchPath;

std::map<std::string, std::pair<bool, std::string>> searchPathResolved;

/* Cache used by checkSourcePath(). */
std::unordered_map<Path, Path> resolvedPaths;
std::map<std::string, std::optional<SourcePath>> searchPathResolved;

/* Cache used by prim_match(). */
std::shared_ptr<RegexCache> regexCache;

@@ -219,6 +222,22 @@ public:

SearchPath getSearchPath() { return searchPath; }

SourcePath rootPath(const Path & path);

void registerAccessor(ref<InputAccessor> accessor);

/* Convert a path to a string representation of the format
`/__virtual__/<accessor-number>/<path>`. */
std::string encodePath(const SourcePath & path);

/* Decode a path encoded by `encodePath()`. */
SourcePath decodePath(std::string_view s, PosIdx pos = noPos);

/* Decode all virtual paths in a string, i.e. all
/__virtual__/... substrings are replaced by the corresponding
input accessor. */
std::string decodePaths(std::string_view s);

/* Allow access to a path. */
void allowPath(const Path & path);
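The `encodePath`/`decodePath` declarations above only specify the string format, `/__virtual__/<accessor-number>/<path>`; their implementations are not part of this hunk. A hypothetical round trip over that documented format, using a plain integer in place of a real `InputAccessor` and a string in place of `SourcePath`:

```
// Hypothetical illustration of the /__virtual__/<accessor-number>/<path>
// encoding documented above; not the actual EvalState implementation.
#include <cstddef>
#include <iostream>
#include <stdexcept>
#include <string>
#include <utility>

static std::string encodePath(size_t accessorNumber, const std::string & path)
{
    // `path` is assumed to be an absolute path inside the accessor, e.g. "/flake.nix".
    return "/__virtual__/" + std::to_string(accessorNumber) + path;
}

static std::pair<size_t, std::string> decodePath(const std::string & s)
{
    const std::string prefix = "/__virtual__/";
    if (s.compare(0, prefix.size(), prefix) != 0)
        throw std::invalid_argument("not a virtual path: " + s);
    auto rest = s.substr(prefix.size());  // "<number>/<path>"
    auto slash = rest.find('/');
    auto number = std::stoull(rest.substr(0, slash));
    return {(size_t) number, slash == std::string::npos ? "/" : rest.substr(slash)};
}

int main()
{
    auto encoded = encodePath(3, "/flake.nix");
    auto [n, p] = decodePath(encoded);
    std::cout << encoded << " -> accessor " << n << ", path " << p << "\n";
}
```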
@@ -229,10 +248,6 @@ public:
/* Allow access to a store path and return it as a string. */
void allowAndSetStorePathString(const StorePath & storePath, Value & v);

/* Check whether access to a path is allowed and throw an error if
not. Otherwise return the canonicalised path. */
Path checkSourcePath(const Path & path);

void checkURI(const std::string & uri);

/* When using a diverted store and 'path' is in the Nix store, map

@@ -245,36 +260,30 @@ public:
Path toRealPath(const Path & path, const PathSet & context);

/* Parse a Nix expression from the specified file. */
Expr * parseExprFromFile(const Path & path);
Expr * parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv> & staticEnv);
Expr * parseExprFromFile(const SourcePath & path);
Expr * parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv);

/* Parse a Nix expression from the specified string. */
Expr * parseExprFromString(std::string s, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv);
Expr * parseExprFromString(std::string s, const Path & basePath);
Expr * parseExprFromString(std::string s, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv);
Expr * parseExprFromString(std::string s, const SourcePath & basePath);

Expr * parseStdin();

/* Evaluate an expression read from the given file to normal
form. Optionally enforce that the top-level expression is
trivial (i.e. doesn't require arbitrary computation). */
void evalFile(const Path & path, Value & v, bool mustBeTrivial = false);

/* Like `evalFile`, but with an already parsed expression. */
void cacheFile(
const Path & path,
const Path & resolvedPath,
Expr * e,
Value & v,
bool mustBeTrivial = false);
void evalFile(const SourcePath & path, Value & v, bool mustBeTrivial = false);

void resetFileCache();

/* Look up a file in the search path. */
Path findFile(const std::string_view path);
Path findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos);
SourcePath findFile(const std::string_view path);
SourcePath findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos);

/* If the specified search path element is a URI, download it. */
std::pair<bool, std::string> resolveSearchPathElem(const SearchPathElem & elem);
std::optional<SourcePath> resolveSearchPathElem(
const SearchPathElem & elem,
bool initAccessControl = false);

/* Evaluate an expression to normal form, storing the result in
value `v'. */

@@ -323,7 +332,7 @@ public:
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const char * s, const std::string & s2);
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const PosIdx pos, const char * s, const std::string & s2);
void throwEvalError(const PosIdx pos, const char * s, std::string_view s2);
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const char * s, const std::string & s2,
Env & env, Expr & expr);

@@ -334,6 +343,8 @@ public:
void throwEvalError(const char * s, const std::string & s2, const std::string & s3,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const PosIdx pos, const char * s, std::string_view s2) const;
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const PosIdx pos, const char * s, const std::string & s2, const std::string & s3,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]

@@ -397,15 +408,14 @@ public:
booleans and lists to a string. If `copyToStore' is set,
referenced paths are copied to the Nix store as a side effect. */
BackedStringView coerceToString(const PosIdx pos, Value & v, PathSet & context,
bool coerceMore = false, bool copyToStore = true,
bool canonicalizePath = true);
bool coerceMore = false, bool copyToStore = true);

std::string copyPathToStore(PathSet & context, const Path & path);
StorePath copyPathToStore(PathSet & context, const SourcePath & path);

/* Path coercion. Converts strings, paths and derivations to a
path. The result is guaranteed to be a canonicalised, absolute
path. Nothing is copied to the store. */
Path coerceToPath(const PosIdx pos, Value & v, PathSet & context);
SourcePath coerceToPath(const PosIdx pos, Value & v, PathSet & context);

/* Like coerceToPath, but the result must be a store path. */
StorePath coerceToStorePath(const PosIdx pos, Value & v, PathSet & context);

@@ -457,8 +467,12 @@ private:
friend struct ExprAttrs;
friend struct ExprLet;

Expr * parse(char * text, size_t length, FileOrigin origin, const PathView path,
const PathView basePath, std::shared_ptr<StaticEnv> & staticEnv);
Expr * parse(
char * text,
size_t length,
Pos::Origin origin,
const SourcePath & basePath,
std::shared_ptr<StaticEnv> & staticEnv);

public:

@@ -573,7 +587,7 @@ std::string showType(const Value & v);
NixStringContextElem decodeContext(const Store & store, std::string_view s);

/* If `path' refers to a directory, then append "/default.nix". */
Path resolveExprPath(Path path);
SourcePath resolveExprPath(const SourcePath & path);

struct InvalidPathError : EvalError
{

@@ -590,6 +604,10 @@ struct EvalSettings : Config

static Strings getDefaultNixPath();

static bool isPseudoUrl(std::string_view s);

static std::string resolvePseudoUrl(std::string_view url);

Setting<bool> enableNativeCode{this, false, "allow-unsafe-native-code-during-evaluation",
"Whether builtin functions that allow executing native code should be enabled."};

@@ -660,8 +678,6 @@ struct EvalSettings : Config

extern EvalSettings evalSettings;

static const std::string corepkgsPrefix{"/__corepkgs__/"};

}

#include "eval-inline.hh"
@@ -1,46 +1,67 @@
lockFileStr: rootSrc: rootSubdir:
# This is a helper to callFlake() to lazily fetch flake inputs.

# The contents of the lock file, in JSON format.
lockFileStr:

# A mapping of lock file node IDs to { sourceInfo, subdir } attrsets,
# with sourceInfo.outPath providing an InputAccessor to a previously
# fetched tree. This is necessary for possibly unlocked inputs, in
# particular the root input, but also --override-inputs pointing to
# unlocked trees.
overrides:

let

lockFile = builtins.fromJSON lockFileStr;

# Resolve a input spec into a node name. An input spec is
# either a node name, or a 'follows' path from the root
# node.
resolveInput = inputSpec:
if builtins.isList inputSpec
then getInputByPath lockFile.root inputSpec
else inputSpec;

# Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
# root node, returning the final node.
getInputByPath = nodeName: path:
if path == []
then nodeName
else
getInputByPath
# Since this could be a 'follows' input, call resolveInput.
(resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
(builtins.tail path);

allNodes =
builtins.mapAttrs
(key: node:
let

sourceInfo =
if key == lockFile.root
then rootSrc
else fetchTree (node.info or {} // removeAttrs node.locked ["dir"]);
if overrides ? ${key}
then overrides.${key}.sourceInfo
else if node.locked.type == "path" && builtins.substring 0 1 node.locked.path != "/"
then
let
parentNode = allNodes.${getInputByPath lockFile.root node.parent};
in parentNode.sourceInfo // {
outPath = parentNode.sourceInfo.outPath + ("/" + node.locked.path);
}
else
# FIXME: remove obsolete node.info.
fetchTree (node.info or {} // removeAttrs node.locked ["dir"]);

subdir = if key == lockFile.root then rootSubdir else node.locked.dir or "";
# With overrides, the accessor already points to the right subdirectory.
subdir = if overrides ? ${key} then "" else node.locked.dir or "";

flake = import (sourceInfo + (if subdir != "" then "/" else "") + subdir + "/flake.nix");
flake =
import (sourceInfo.outPath + ((if subdir != "" then "/" else "") + subdir + "/flake.nix"));

inputs = builtins.mapAttrs
(inputName: inputSpec: allNodes.${resolveInput inputSpec})
(node.inputs or {});

# Resolve a input spec into a node name. An input spec is
# either a node name, or a 'follows' path from the root
# node.
resolveInput = inputSpec:
if builtins.isList inputSpec
then getInputByPath lockFile.root inputSpec
else inputSpec;

# Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
# root node, returning the final node.
getInputByPath = nodeName: path:
if path == []
then nodeName
else
getInputByPath
# Since this could be a 'follows' input, call resolveInput.
(resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
(builtins.tail path);

outputs = flake.outputs (inputs // { self = result; });

result = outputs // sourceInfo // { inherit inputs; inherit outputs; inherit sourceInfo; };
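A rough illustration (not part of the commit) of the shape of the 'overrides' argument this helper expects: lock-file node IDs mapped to attrsets whose sourceInfo.outPath points at an already-fetched tree. The node names below are hypothetical.

    # Hypothetical 'overrides' value, per the comment above: the root node
    # plus one --override-input, both already fetched.
    {
      root.sourceInfo = { outPath = ./.; };        # the (possibly dirty) top-level flake
      nixpkgs.sourceInfo = builtins.fetchTree {    # an override pointing at an unlocked tree
        type = "github"; owner = "NixOS"; repo = "nixpkgs";
      };
    }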
@ -14,71 +14,12 @@ using namespace flake;
|
|||
|
||||
namespace flake {
|
||||
|
||||
typedef std::pair<fetchers::Tree, FlakeRef> FetchedFlake;
|
||||
typedef std::vector<std::pair<FlakeRef, FetchedFlake>> FlakeCache;
|
||||
|
||||
static std::optional<FetchedFlake> lookupInFlakeCache(
|
||||
const FlakeCache & flakeCache,
|
||||
const FlakeRef & flakeRef)
|
||||
{
|
||||
// FIXME: inefficient.
|
||||
for (auto & i : flakeCache) {
|
||||
if (flakeRef == i.first) {
|
||||
debug("mapping '%s' to previously seen input '%s' -> '%s",
|
||||
flakeRef, i.first, i.second.second);
|
||||
return i.second;
|
||||
}
|
||||
}
|
||||
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
|
||||
EvalState & state,
|
||||
const FlakeRef & originalRef,
|
||||
bool allowLookup,
|
||||
FlakeCache & flakeCache)
|
||||
{
|
||||
auto fetched = lookupInFlakeCache(flakeCache, originalRef);
|
||||
FlakeRef resolvedRef = originalRef;
|
||||
|
||||
if (!fetched) {
|
||||
if (originalRef.input.isDirect()) {
|
||||
fetched.emplace(originalRef.fetchTree(state.store));
|
||||
} else {
|
||||
if (allowLookup) {
|
||||
resolvedRef = originalRef.resolve(state.store);
|
||||
auto fetchedResolved = lookupInFlakeCache(flakeCache, originalRef);
|
||||
if (!fetchedResolved) fetchedResolved.emplace(resolvedRef.fetchTree(state.store));
|
||||
flakeCache.push_back({resolvedRef, *fetchedResolved});
|
||||
fetched.emplace(*fetchedResolved);
|
||||
}
|
||||
else {
|
||||
throw Error("'%s' is an indirect flake reference, but registry lookups are not allowed", originalRef);
|
||||
}
|
||||
}
|
||||
flakeCache.push_back({originalRef, *fetched});
|
||||
}
|
||||
|
||||
auto [tree, lockedRef] = *fetched;
|
||||
|
||||
debug("got tree '%s' from '%s'",
|
||||
state.store->printStorePath(tree.storePath), lockedRef);
|
||||
|
||||
state.allowPath(tree.storePath);
|
||||
|
||||
assert(!originalRef.input.getNarHash() || tree.storePath == originalRef.input.computeStorePath(*state.store));
|
||||
|
||||
return {std::move(tree), resolvedRef, lockedRef};
|
||||
}
|
||||
|
||||
static void forceTrivialValue(EvalState & state, Value & value, const PosIdx pos)
|
||||
{
|
||||
if (value.isThunk()) // HACK: always forceValue, even if not trivial
|
||||
state.forceValue(value, pos);
|
||||
}
|
||||
|
||||
|
||||
static void expectType(EvalState & state, ValueType type,
|
||||
Value & value, const PosIdx pos)
|
||||
{
|
||||
|
@ -89,12 +30,17 @@ static void expectType(EvalState & state, ValueType type,
|
|||
}
|
||||
|
||||
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
||||
EvalState & state, Value * value, const PosIdx pos,
|
||||
const std::optional<Path> & baseDir, InputPath lockRootPath);
|
||||
EvalState & state,
|
||||
Value * value,
|
||||
const PosIdx pos,
|
||||
const InputPath & lockRootPath);
|
||||
|
||||
static FlakeInput parseFlakeInput(EvalState & state,
|
||||
const std::string & inputName, Value * value, const PosIdx pos,
|
||||
const std::optional<Path> & baseDir, InputPath lockRootPath)
|
||||
static FlakeInput parseFlakeInput(
|
||||
EvalState & state,
|
||||
const std::string & inputName,
|
||||
Value * value,
|
||||
const PosIdx pos,
|
||||
const InputPath & lockRootPath)
|
||||
{
|
||||
expectType(state, nAttrs, *value, pos);
|
||||
|
||||
|
@ -118,7 +64,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
expectType(state, nBool, *attr.value, attr.pos);
|
||||
input.isFlake = attr.value->boolean;
|
||||
} else if (attr.name == sInputs) {
|
||||
input.overrides = parseFlakeInputs(state, attr.value, attr.pos, baseDir, lockRootPath);
|
||||
input.overrides = parseFlakeInputs(state, attr.value, attr.pos, lockRootPath);
|
||||
} else if (attr.name == sFollows) {
|
||||
expectType(state, nString, *attr.value, attr.pos);
|
||||
auto follows(parseInputPath(attr.value->string.s));
|
||||
|
@ -160,7 +106,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
if (!attrs.empty())
|
||||
throw Error("unexpected flake input attribute '%s', at %s", attrs.begin()->first, state.positions[pos]);
|
||||
if (url)
|
||||
input.ref = parseFlakeRef(*url, baseDir, true, input.isFlake);
|
||||
input.ref = parseFlakeRef(*url, {}, true, input.isFlake);
|
||||
}
|
||||
|
||||
if (!input.follows && !input.ref)
|
||||
|
@ -170,8 +116,10 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
}
|
||||
|
||||
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
||||
EvalState & state, Value * value, const PosIdx pos,
|
||||
const std::optional<Path> & baseDir, InputPath lockRootPath)
|
||||
EvalState & state,
|
||||
Value * value,
|
||||
const PosIdx pos,
|
||||
const InputPath & lockRootPath)
|
||||
{
|
||||
std::map<FlakeId, FlakeInput> inputs;
|
||||
|
||||
|
@ -183,45 +131,38 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
|||
state.symbols[inputAttr.name],
|
||||
inputAttr.value,
|
||||
inputAttr.pos,
|
||||
baseDir,
|
||||
lockRootPath));
|
||||
}
|
||||
|
||||
return inputs;
|
||||
}
|
||||
|
||||
static Flake getFlake(
|
||||
static Flake readFlake(
|
||||
EvalState & state,
|
||||
const FlakeRef & originalRef,
|
||||
bool allowLookup,
|
||||
FlakeCache & flakeCache,
|
||||
InputPath lockRootPath)
|
||||
const FlakeRef & resolvedRef,
|
||||
const FlakeRef & lockedRef,
|
||||
const SourcePath & rootDir,
|
||||
const InputPath & lockRootPath)
|
||||
{
|
||||
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||
state, originalRef, allowLookup, flakeCache);
|
||||
CanonPath flakeDir(resolvedRef.subdir);
|
||||
auto flakePath = rootDir + flakeDir + "flake.nix";
|
||||
|
||||
// Guard against symlink attacks.
|
||||
auto flakeDir = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir, true);
|
||||
auto flakeFile = canonPath(flakeDir + "/flake.nix", true);
|
||||
if (!isInDir(flakeFile, sourceInfo.actualPath))
|
||||
throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
|
||||
lockedRef, state.store->printStorePath(sourceInfo.storePath));
|
||||
if (!flakePath.pathExists())
|
||||
throw Error("file '%s' does not exist", flakePath);
|
||||
|
||||
Value vInfo;
|
||||
state.evalFile(flakePath, vInfo, true);
|
||||
|
||||
expectType(state, nAttrs, vInfo, state.positions.add(Pos::Origin(rootDir), 1, 1));
|
||||
|
||||
Flake flake {
|
||||
.originalRef = originalRef,
|
||||
.resolvedRef = resolvedRef,
|
||||
.lockedRef = lockedRef,
|
||||
.sourceInfo = std::make_shared<fetchers::Tree>(std::move(sourceInfo))
|
||||
.path = flakePath,
|
||||
};
|
||||
|
||||
if (!pathExists(flakeFile))
|
||||
throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir);
|
||||
|
||||
Value vInfo;
|
||||
state.evalFile(flakeFile, vInfo, true); // FIXME: symlink attack
|
||||
|
||||
expectType(state, nAttrs, vInfo, state.positions.add({flakeFile, foFile}, 0, 0));
|
||||
|
||||
if (auto description = vInfo.attrs->get(state.sDescription)) {
|
||||
expectType(state, nString, *description->value, description->pos);
|
||||
flake.description = description->value->string.s;
|
||||
|
@ -230,7 +171,7 @@ static Flake getFlake(
|
|||
auto sInputs = state.symbols.create("inputs");
|
||||
|
||||
if (auto inputs = vInfo.attrs->get(sInputs))
|
||||
flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, flakeDir, lockRootPath);
|
||||
flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, lockRootPath);
|
||||
|
||||
auto sOutputs = state.symbols.create("outputs");
|
||||
|
||||
|
@ -247,7 +188,7 @@ static Flake getFlake(
|
|||
}
|
||||
|
||||
} else
|
||||
throw Error("flake '%s' lacks attribute 'outputs'", lockedRef);
|
||||
throw Error("flake '%s' lacks attribute 'outputs'", resolvedRef);
|
||||
|
||||
auto sNixConfig = state.symbols.create("nixConfig");
|
||||
|
||||
|
@ -264,7 +205,7 @@ static Flake getFlake(
|
|||
PathSet emptyContext = {};
|
||||
flake.config.settings.emplace(
|
||||
state.symbols[setting.name],
|
||||
state.coerceToString(setting.pos, *setting.value, emptyContext, false, true, true) .toOwned());
|
||||
state.coerceToString(setting.pos, *setting.value, emptyContext, false, true).toOwned());
|
||||
}
|
||||
else if (setting.value->type() == nInt)
|
||||
flake.config.settings.emplace(
|
||||
|
@ -296,21 +237,51 @@ static Flake getFlake(
|
|||
attr.name != sOutputs &&
|
||||
attr.name != sNixConfig)
|
||||
throw Error("flake '%s' has an unsupported attribute '%s', at %s",
|
||||
lockedRef, state.symbols[attr.name], state.positions[attr.pos]);
|
||||
resolvedRef, state.symbols[attr.name], state.positions[attr.pos]);
|
||||
}
|
||||
|
||||
return flake;
|
||||
}
|
||||
|
||||
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup, FlakeCache & flakeCache)
|
||||
static FlakeRef maybeResolve(
|
||||
EvalState & state,
|
||||
const FlakeRef & originalRef,
|
||||
bool useRegistries)
|
||||
{
|
||||
return getFlake(state, originalRef, allowLookup, flakeCache, {});
|
||||
if (!originalRef.input.isDirect()) {
|
||||
if (!useRegistries)
|
||||
throw Error("'%s' is an indirect flake reference, but registry lookups are not allowed", originalRef);
|
||||
return originalRef.resolve(state.store);
|
||||
} else
|
||||
return originalRef;
|
||||
}
|
||||
|
||||
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup)
|
||||
static Flake getFlake(
|
||||
EvalState & state,
|
||||
const FlakeRef & originalRef,
|
||||
bool useRegistries,
|
||||
const InputPath & lockRootPath)
|
||||
{
|
||||
FlakeCache flakeCache;
|
||||
return getFlake(state, originalRef, allowLookup, flakeCache);
|
||||
auto resolvedRef = maybeResolve(state, originalRef, useRegistries);
|
||||
|
||||
auto [accessor, lockedRef] = resolvedRef.lazyFetch(state.store);
|
||||
|
||||
state.registerAccessor(accessor);
|
||||
|
||||
return readFlake(state, originalRef, resolvedRef, lockedRef, SourcePath {accessor, CanonPath::root}, lockRootPath);
|
||||
}
|
||||
|
||||
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool useRegistries)
|
||||
{
|
||||
return getFlake(state, originalRef, useRegistries, {});
|
||||
}
|
||||
|
||||
static LockFile readLockFile(const Flake & flake)
|
||||
{
|
||||
auto lockFilePath = flake.path.parent() + "flake.lock";
|
||||
return lockFilePath.pathExists()
|
||||
? LockFile(lockFilePath.readFile(), fmt("%s", lockFilePath))
|
||||
: LockFile();
|
||||
}
|
||||
|
||||
/* Compute an in-memory lock file for the specified top-level flake,
|
||||
|
@ -322,30 +293,32 @@ LockedFlake lockFlake(
|
|||
{
|
||||
settings.requireExperimentalFeature(Xp::Flakes);
|
||||
|
||||
FlakeCache flakeCache;
|
||||
|
||||
auto useRegistries = lockFlags.useRegistries.value_or(fetchSettings.useRegistries);
|
||||
|
||||
auto flake = getFlake(state, topRef, useRegistries, flakeCache);
|
||||
auto flake = std::make_unique<Flake>(getFlake(state, topRef, useRegistries, {}));
|
||||
|
||||
if (lockFlags.applyNixConfig) {
|
||||
flake.config.apply();
|
||||
flake->config.apply();
|
||||
state.store->setOptions();
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
// FIXME: symlink attack
|
||||
auto oldLockFile = LockFile::read(
|
||||
flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir + "/flake.lock");
|
||||
auto oldLockFile = readLockFile(*flake);
|
||||
|
||||
debug("old lock file: %s", oldLockFile);
|
||||
|
||||
std::map<InputPath, FlakeInput> overrides;
|
||||
std::map<InputPath, std::tuple<FlakeInput, SourcePath, std::optional<InputPath>>> overrides;
|
||||
std::set<InputPath> overridesUsed, updatesUsed;
|
||||
std::map<ref<Node>, SourcePath> nodePaths;
|
||||
|
||||
for (auto & i : lockFlags.inputOverrides)
|
||||
overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
|
||||
overrides.emplace(
|
||||
i.first,
|
||||
std::make_tuple(
|
||||
FlakeInput { .ref = i.second },
|
||||
state.rootPath("/"),
|
||||
std::nullopt));
|
||||
|
||||
LockFile newLockFile;
|
||||
|
||||
|
@ -353,21 +326,32 @@ LockedFlake lockFlake(
|
|||
|
||||
std::function<void(
|
||||
const FlakeInputs & flakeInputs,
|
||||
std::shared_ptr<Node> node,
|
||||
ref<Node> node,
|
||||
const InputPath & inputPathPrefix,
|
||||
std::shared_ptr<const Node> oldNode,
|
||||
const InputPath & lockRootPath,
|
||||
const Path & parentPath,
|
||||
const InputPath & followsPrefix,
|
||||
const SourcePath & sourcePath,
|
||||
bool trustLock)>
|
||||
computeLocks;
|
||||
|
||||
computeLocks = [&](
|
||||
/* The inputs of this node, either from flake.nix or
|
||||
flake.lock */
|
||||
const FlakeInputs & flakeInputs,
|
||||
std::shared_ptr<Node> node,
|
||||
/* The node whose locks are to be updated.*/
|
||||
ref<Node> node,
|
||||
/* The path to this node in the lock file graph. */
|
||||
const InputPath & inputPathPrefix,
|
||||
/* The old node, if any, from which locks can be
|
||||
copied. */
|
||||
std::shared_ptr<const Node> oldNode,
|
||||
const InputPath & lockRootPath,
|
||||
const Path & parentPath,
|
||||
/* The prefix relative to which 'follows' should be
|
||||
interpreted. When a node is initially locked, it's
|
||||
relative to the node's flake; when it's already locked,
|
||||
it's relative to the root of the lock file. */
|
||||
const InputPath & followsPrefix,
|
||||
/* The source path of this node's flake. */
|
||||
const SourcePath & sourcePath,
|
||||
bool trustLock)
|
||||
{
|
||||
debug("computing lock file node '%s'", printInputPath(inputPathPrefix));
|
||||
|
@ -379,7 +363,8 @@ LockedFlake lockFlake(
|
|||
auto inputPath(inputPathPrefix);
|
||||
inputPath.push_back(id);
|
||||
inputPath.push_back(idOverride);
|
||||
overrides.insert_or_assign(inputPath, inputOverride);
|
||||
overrides.emplace(inputPath,
|
||||
std::make_tuple(inputOverride, sourcePath, inputPathPrefix));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -410,13 +395,18 @@ LockedFlake lockFlake(
|
|||
ancestors? */
|
||||
auto i = overrides.find(inputPath);
|
||||
bool hasOverride = i != overrides.end();
|
||||
if (hasOverride) {
|
||||
if (hasOverride)
|
||||
overridesUsed.insert(inputPath);
|
||||
// Respect the “flakeness” of the input even if we
|
||||
// override it
|
||||
i->second.isFlake = input2.isFlake;
|
||||
}
|
||||
auto & input = hasOverride ? i->second : input2;
|
||||
auto input = hasOverride ? std::get<0>(i->second) : input2;
|
||||
|
||||
/* Resolve relative 'path:' inputs relative to
|
||||
the source path of the overrider. */
|
||||
auto overridenSourcePath = hasOverride ? std::get<1>(i->second) : sourcePath;
|
||||
|
||||
/* Respect the "flakeness" of the input even if we
|
||||
override it. */
|
||||
if (hasOverride)
|
||||
input.isFlake = input2.isFlake;
|
||||
|
||||
/* Resolve 'follows' later (since it may refer to an input
|
||||
path we haven't processed yet. */
|
||||
|
@ -432,6 +422,25 @@ LockedFlake lockFlake(
|
|||
|
||||
assert(input.ref);
|
||||
|
||||
auto overridenParentPath =
|
||||
input.ref->input.isRelative()
|
||||
? std::optional<InputPath>(hasOverride ? std::get<2>(i->second) : inputPathPrefix)
|
||||
: std::nullopt;
|
||||
|
||||
/* Get the input flake, resolve 'path:./...'
|
||||
flakerefs relative to the parent flake. */
|
||||
auto getInputFlake = [&]()
|
||||
{
|
||||
if (auto relativePath = input.ref->input.isRelative()) {
|
||||
SourcePath inputSourcePath {
|
||||
overridenSourcePath.accessor,
|
||||
CanonPath(*relativePath, *overridenSourcePath.path.parent())
|
||||
};
|
||||
return readFlake(state, *input.ref, *input.ref, *input.ref, inputSourcePath, inputPath);
|
||||
} else
|
||||
return getFlake(state, *input.ref, useRegistries, inputPath);
|
||||
};
|
||||
|
||||
/* Do we have an entry in the existing lock file? And we
|
||||
don't have a --update-input flag for this input? */
|
||||
std::shared_ptr<LockedNode> oldLock;
|
||||
|
@ -445,6 +454,7 @@ LockedFlake lockFlake(
|
|||
|
||||
if (oldLock
|
||||
&& oldLock->originalRef == *input.ref
|
||||
&& oldLock->parentPath == overridenParentPath
|
||||
&& !hasOverride)
|
||||
{
|
||||
debug("keeping existing input '%s'", inputPathS);
|
||||
|
@ -452,8 +462,9 @@ LockedFlake lockFlake(
|
|||
/* Copy the input from the old lock since its flakeref
|
||||
didn't change and there is no override from a
|
||||
higher level flake. */
|
||||
auto childNode = std::make_shared<LockedNode>(
|
||||
oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake);
|
||||
auto childNode = make_ref<LockedNode>(
|
||||
oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake,
|
||||
oldLock->parentPath);
|
||||
|
||||
node->inputs.insert_or_assign(id, childNode);
|
||||
|
||||
|
@ -481,7 +492,7 @@ LockedFlake lockFlake(
|
|||
.isFlake = (*lockedNode)->isFlake,
|
||||
});
|
||||
} else if (auto follows = std::get_if<1>(&i.second)) {
|
||||
if (! trustLock) {
|
||||
if (!trustLock) {
|
||||
// It is possible that the flake has changed,
|
||||
// so we must confirm all the follows that are in the lock file are also in the flake.
|
||||
auto overridePath(inputPath);
|
||||
|
@ -496,7 +507,7 @@ LockedFlake lockFlake(
|
|||
break;
|
||||
}
|
||||
}
|
||||
auto absoluteFollows(lockRootPath);
|
||||
auto absoluteFollows(followsPrefix);
|
||||
absoluteFollows.insert(absoluteFollows.end(), follows->begin(), follows->end());
|
||||
fakeInputs.emplace(i.first, FlakeInput {
|
||||
.follows = absoluteFollows,
|
||||
|
@ -505,24 +516,26 @@ LockedFlake lockFlake(
|
|||
}
|
||||
}
|
||||
|
||||
auto localPath(parentPath);
|
||||
// If this input is a path, recurse it down.
|
||||
// This allows us to resolve path inputs relative to the current flake.
|
||||
if ((*input.ref).input.getType() == "path")
|
||||
localPath = absPath(*input.ref->input.getSourcePath(), parentPath);
|
||||
computeLocks(
|
||||
mustRefetch
|
||||
? getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath).inputs
|
||||
: fakeInputs,
|
||||
childNode, inputPath, oldLock, lockRootPath, parentPath, !mustRefetch);
|
||||
if (mustRefetch) {
|
||||
auto inputFlake = getInputFlake();
|
||||
nodePaths.emplace(childNode, inputFlake.path.parent());
|
||||
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, followsPrefix,
|
||||
inputFlake.path, !mustRefetch);
|
||||
} else {
|
||||
// FIXME: sourcePath is wrong here, we
|
||||
// should pass a lambda that lazily
|
||||
// fetches the parent flake if needed
|
||||
// (i.e. getInputFlake()).
|
||||
computeLocks(fakeInputs, childNode, inputPath, oldLock, followsPrefix, sourcePath, !mustRefetch);
|
||||
}
|
||||
|
||||
} else {
|
||||
/* We need to create a new lock file entry. So fetch
|
||||
this input. */
|
||||
debug("creating new input '%s'", inputPathS);
|
||||
|
||||
if (!lockFlags.allowMutable && !input.ref->input.isLocked())
|
||||
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
|
||||
if (!lockFlags.allowUnlocked && !input.ref->input.isLocked() && !input.ref->input.isRelative())
|
||||
throw Error("cannot update unlocked flake input '%s' in pure mode", inputPathS);
|
||||
|
||||
/* Note: in case of an --override-input, we use
|
||||
the *original* ref (input2.ref) for the
|
||||
|
@ -534,17 +547,11 @@ LockedFlake lockFlake(
|
|||
auto ref = input2.ref ? *input2.ref : *input.ref;
|
||||
|
||||
if (input.isFlake) {
|
||||
Path localPath = parentPath;
|
||||
FlakeRef localRef = *input.ref;
|
||||
auto inputFlake = getInputFlake();
|
||||
|
||||
// If this input is a path, recurse it down.
|
||||
// This allows us to resolve path inputs relative to the current flake.
|
||||
if (localRef.input.getType() == "path")
|
||||
localPath = absPath(*input.ref->input.getSourcePath(), parentPath);
|
||||
|
||||
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath);
|
||||
|
||||
auto childNode = std::make_shared<LockedNode>(inputFlake.lockedRef, ref);
|
||||
auto childNode = make_ref<LockedNode>(
|
||||
inputFlake.lockedRef, ref, true,
|
||||
overridenParentPath);
|
||||
|
||||
node->inputs.insert_or_assign(id, childNode);
|
||||
|
||||
|
@ -559,20 +566,27 @@ LockedFlake lockFlake(
|
|||
flake. Also, unless we already have this flake
|
||||
in the top-level lock file, use this flake's
|
||||
own lock file. */
|
||||
nodePaths.emplace(childNode, inputFlake.path.parent());
|
||||
computeLocks(
|
||||
inputFlake.inputs, childNode, inputPath,
|
||||
oldLock
|
||||
? std::dynamic_pointer_cast<const Node>(oldLock)
|
||||
: LockFile::read(
|
||||
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root,
|
||||
oldLock ? lockRootPath : inputPath, localPath, false);
|
||||
: (std::shared_ptr<Node>) readLockFile(inputFlake).root,
|
||||
oldLock ? followsPrefix : inputPath,
|
||||
inputFlake.path,
|
||||
false);
|
||||
}
|
||||
|
||||
else {
|
||||
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||
state, *input.ref, useRegistries, flakeCache);
|
||||
node->inputs.insert_or_assign(id,
|
||||
std::make_shared<LockedNode>(lockedRef, ref, false));
|
||||
auto resolvedRef = maybeResolve(state, *input.ref, useRegistries);
|
||||
|
||||
auto [accessor, lockedRef] = resolvedRef.lazyFetch(state.store);
|
||||
|
||||
auto childNode = make_ref<LockedNode>(lockedRef, ref, false, overridenParentPath);
|
||||
|
||||
nodePaths.emplace(childNode, accessor->root());
|
||||
|
||||
node->inputs.insert_or_assign(id, childNode);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -583,12 +597,16 @@ LockedFlake lockFlake(
|
|||
}
|
||||
};
|
||||
|
||||
// Bring in the current ref for relative path resolution if we have it
|
||||
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir, true);
|
||||
nodePaths.emplace(newLockFile.root, flake->path.parent());
|
||||
|
||||
computeLocks(
|
||||
flake.inputs, newLockFile.root, {},
|
||||
lockFlags.recreateLockFile ? nullptr : oldLockFile.root, {}, parentPath, false);
|
||||
flake->inputs,
|
||||
newLockFile.root,
|
||||
{},
|
||||
lockFlags.recreateLockFile ? nullptr : (std::shared_ptr<Node>) oldLockFile.root,
|
||||
{},
|
||||
flake->path,
|
||||
false);
|
||||
|
||||
for (auto & i : lockFlags.inputOverrides)
|
||||
if (!overridesUsed.count(i.first))
|
||||
|
@ -610,82 +628,68 @@ LockedFlake lockFlake(
|
|||
auto diff = LockFile::diff(oldLockFile, newLockFile);
|
||||
|
||||
if (lockFlags.writeLockFile) {
|
||||
if (auto sourcePath = topRef.input.getSourcePath()) {
|
||||
if (!newLockFile.isImmutable()) {
|
||||
if (fetchSettings.warnDirty)
|
||||
warn("will not write lock file of flake '%s' because it has a mutable input", topRef);
|
||||
} else {
|
||||
if (!lockFlags.updateLockFile)
|
||||
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
|
||||
if (auto unlockedInput = newLockFile.isUnlocked()) {
|
||||
if (fetchSettings.warnDirty)
|
||||
warn("will not write lock file of flake '%s' because it has an unlocked input ('%s')", topRef, *unlockedInput);
|
||||
} else {
|
||||
if (!lockFlags.updateLockFile)
|
||||
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
|
||||
|
||||
auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";
|
||||
auto path = flake->path.parent() + "flake.lock";
|
||||
|
||||
auto path = *sourcePath + "/" + relPath;
|
||||
bool lockFileExists = path.pathExists();
|
||||
|
||||
bool lockFileExists = pathExists(path);
|
||||
if (lockFileExists) {
|
||||
auto s = chomp(diff);
|
||||
if (s.empty())
|
||||
warn("updating lock file '%s'", path);
|
||||
else
|
||||
warn("updating lock file '%s':\n%s", path, s);
|
||||
} else
|
||||
warn("creating lock file '%s'", path);
|
||||
|
||||
if (lockFileExists) {
|
||||
auto s = chomp(diff);
|
||||
if (s.empty())
|
||||
warn("updating lock file '%s'", path);
|
||||
else
|
||||
warn("updating lock file '%s':\n%s", path, s);
|
||||
} else
|
||||
warn("creating lock file '%s'", path);
|
||||
std::optional<std::string> commitMessage = std::nullopt;
|
||||
if (lockFlags.commitLockFile) {
|
||||
std::string cm;
|
||||
|
||||
newLockFile.write(path);
|
||||
cm = fetchSettings.commitLockFileSummary.get();
|
||||
|
||||
std::optional<std::string> commitMessage = std::nullopt;
|
||||
if (lockFlags.commitLockFile) {
|
||||
std::string cm;
|
||||
if (cm == "")
|
||||
cm = fmt("%s: %s", path.path.rel(), lockFileExists ? "Update" : "Add");
|
||||
|
||||
cm = fetchSettings.commitLockFileSummary.get();
|
||||
|
||||
if (cm == "") {
|
||||
cm = fmt("%s: %s", relPath, lockFileExists ? "Update" : "Add");
|
||||
}
|
||||
|
||||
cm += "\n\nFlake lock file updates:\n\n";
|
||||
cm += filterANSIEscapes(diff, true);
|
||||
commitMessage = cm;
|
||||
}
|
||||
|
||||
topRef.input.markChangedFile(
|
||||
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
|
||||
commitMessage);
|
||||
|
||||
/* Rewriting the lockfile changed the top-level
|
||||
repo, so we should re-read it. FIXME: we could
|
||||
also just clear the 'rev' field... */
|
||||
auto prevLockedRef = flake.lockedRef;
|
||||
FlakeCache dummyCache;
|
||||
flake = getFlake(state, topRef, useRegistries, dummyCache);
|
||||
|
||||
if (lockFlags.commitLockFile &&
|
||||
flake.lockedRef.input.getRev() &&
|
||||
prevLockedRef.input.getRev() != flake.lockedRef.input.getRev())
|
||||
warn("committed new revision '%s'", flake.lockedRef.input.getRev()->gitRev());
|
||||
|
||||
/* Make sure that we picked up the change,
|
||||
i.e. the tree should usually be dirty
|
||||
now. Corner case: we could have reverted from a
|
||||
dirty to a clean tree! */
|
||||
if (flake.lockedRef.input == prevLockedRef.input
|
||||
&& !flake.lockedRef.input.isLocked())
|
||||
throw Error("'%s' did not change after I updated its 'flake.lock' file; is 'flake.lock' under version control?", flake.originalRef);
|
||||
cm += "\n\nFlake lock file updates:\n\n";
|
||||
cm += filterANSIEscapes(diff, true);
|
||||
commitMessage = cm;
|
||||
}
|
||||
} else
|
||||
throw Error("cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef);
|
||||
|
||||
topRef.input.putFile(path.path, fmt("%s\n", newLockFile), commitMessage);
|
||||
|
||||
/* Rewriting the lockfile changed the top-level
|
||||
repo, so we should re-read it. FIXME: we could
|
||||
also just clear the 'rev' field... */
|
||||
auto prevLockedRef = flake->lockedRef;
|
||||
flake = std::make_unique<Flake>(getFlake(state, topRef, useRegistries));
|
||||
|
||||
if (lockFlags.commitLockFile &&
|
||||
flake->lockedRef.input.getRev() &&
|
||||
prevLockedRef.input.getRev() != flake->lockedRef.input.getRev())
|
||||
warn("committed new revision '%s'", flake->lockedRef.input.getRev()->gitRev());
|
||||
}
|
||||
} else {
|
||||
warn("not writing modified lock file of flake '%s':\n%s", topRef, chomp(diff));
|
||||
flake.forceDirty = true;
|
||||
flake->forceDirty = true;
|
||||
}
|
||||
}
|
||||
|
||||
return LockedFlake { .flake = std::move(flake), .lockFile = std::move(newLockFile) };
|
||||
return LockedFlake {
|
||||
.flake = std::move(*flake),
|
||||
.lockFile = std::move(newLockFile),
|
||||
.nodePaths = std::move(nodePaths)
|
||||
};
|
||||
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while updating the lock file of flake '%s'", flake.lockedRef.to_string());
|
||||
if (flake)
|
||||
e.addTrace({}, "while updating the lock file of flake '%s'", flake->lockedRef.to_string());
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
@ -694,34 +698,42 @@ void callFlake(EvalState & state,
|
|||
const LockedFlake & lockedFlake,
|
||||
Value & vRes)
|
||||
{
|
||||
auto vLocks = state.allocValue();
|
||||
auto vRootSrc = state.allocValue();
|
||||
auto vRootSubdir = state.allocValue();
|
||||
auto vTmp1 = state.allocValue();
|
||||
auto vTmp2 = state.allocValue();
|
||||
auto [lockFileStr, keyMap] = lockedFlake.lockFile.to_string();
|
||||
|
||||
vLocks->mkString(lockedFlake.lockFile.to_string());
|
||||
auto overrides = state.buildBindings(lockedFlake.nodePaths.size());
|
||||
|
||||
emitTreeAttrs(
|
||||
state,
|
||||
*lockedFlake.flake.sourceInfo,
|
||||
lockedFlake.flake.lockedRef.input,
|
||||
*vRootSrc,
|
||||
false,
|
||||
lockedFlake.flake.forceDirty);
|
||||
for (auto & [node, sourcePath] : lockedFlake.nodePaths) {
|
||||
auto override = state.buildBindings(2);
|
||||
|
||||
vRootSubdir->mkString(lockedFlake.flake.lockedRef.subdir);
|
||||
auto & vSourceInfo = override.alloc(state.symbols.create("sourceInfo"));
|
||||
|
||||
if (!state.vCallFlake) {
|
||||
state.vCallFlake = allocRootValue(state.allocValue());
|
||||
state.eval(state.parseExprFromString(
|
||||
#include "call-flake.nix.gen.hh"
|
||||
, "/"), **state.vCallFlake);
|
||||
auto lockedNode = node.dynamic_pointer_cast<const LockedNode>();
|
||||
|
||||
emitTreeAttrs(
|
||||
state,
|
||||
sourcePath,
|
||||
lockedNode ? lockedNode->lockedRef.input : lockedFlake.flake.lockedRef.input,
|
||||
vSourceInfo,
|
||||
false,
|
||||
!lockedNode && lockedFlake.flake.forceDirty);
|
||||
|
||||
auto key = keyMap.find(node);
|
||||
assert(key != keyMap.end());
|
||||
|
||||
overrides.alloc(state.symbols.create(key->second)).mkAttrs(override);
|
||||
}
|
||||
|
||||
state.callFunction(**state.vCallFlake, *vLocks, *vTmp1, noPos);
|
||||
state.callFunction(*vTmp1, *vRootSrc, *vTmp2, noPos);
|
||||
state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos);
|
||||
auto & vOverrides = state.allocValue()->mkAttrs(overrides);
|
||||
|
||||
auto vCallFlake = state.allocValue();
|
||||
state.evalFile(state.callFlakeInternal, *vCallFlake);
|
||||
|
||||
auto vTmp1 = state.allocValue();
|
||||
auto vLocks = state.allocValue();
|
||||
vLocks->mkString(lockFileStr);
|
||||
state.callFunction(*vCallFlake, *vLocks, *vTmp1, noPos);
|
||||
|
||||
state.callFunction(*vTmp1, vOverrides, vRes, noPos);
|
||||
}
|
||||
|
||||
static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
|
@ -737,7 +749,7 @@ static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
.updateLockFile = false,
|
||||
.writeLockFile = false,
|
||||
.useRegistries = !evalSettings.pureEval && fetchSettings.useRegistries,
|
||||
.allowMutable = !evalSettings.pureEval,
|
||||
.allowUnlocked = !evalSettings.pureEval,
|
||||
}),
|
||||
v);
|
||||
}
|
||||
|
@@ -769,18 +781,17 @@ static RegisterPrimOp r2({

}

Fingerprint LockedFlake::getFingerprint() const
std::optional<Fingerprint> LockedFlake::getFingerprint(ref<Store> store) const
{
if (lockFile.isUnlocked()) return std::nullopt;

auto fingerprint = flake.lockedRef.input.getFingerprint(store);
if (!fingerprint) return std::nullopt;

// FIXME: as an optimization, if the flake contains a lock file
// and we haven't changed it, then it's sufficient to use
// flake.sourceInfo.storePath for the fingerprint.
return hashString(htSHA256,
fmt("%s;%s;%d;%d;%s",
flake.sourceInfo->storePath.to_string(),
flake.lockedRef.subdir,
flake.lockedRef.input.getRevCount().value_or(0),
flake.lockedRef.input.getLastModified().value_or(0),
lockFile));
return hashString(htSHA256, fmt("%s;%s;%s", *fingerprint, flake.lockedRef.subdir, lockFile));
}

Flake::~Flake() { }
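As an aside (a hedged sketch, not taken from this commit): the relative 'path:./...' inputs that the new lockFlake()/readFlake() code resolves against the parent flake look roughly like this in a flake.nix; the input name and directory are invented.

    {
      description = "Parent flake with a relative, unlocked path input";

      # Resolved relative to this flake's own source tree; on this branch it
      # stays unlocked and is tracked via a 'parent' reference in the lock file.
      inputs.buildHelpers.url = "path:./nix/build-helpers";

      outputs = { self, buildHelpers, ... }: {
        # e.g. expose buildHelpers.lib here
      };
    }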
@@ -61,9 +61,9 @@ struct Flake
FlakeRef originalRef; // the original flake specification (by the user)
FlakeRef resolvedRef; // registry references and caching resolved to the specific underlying flake
FlakeRef lockedRef; // the specific local store result of invoking the fetcher
SourcePath path;
bool forceDirty = false; // pretend that 'lockedRef' is dirty
std::optional<std::string> description;
std::shared_ptr<const fetchers::Tree> sourceInfo;
FlakeInputs inputs;
ConfigFile config; // 'nixConfig' attribute
~Flake();

@@ -79,7 +79,12 @@ struct LockedFlake
Flake flake;
LockFile lockFile;

Fingerprint getFingerprint() const;
/* Source tree accessors for nodes that have been fetched in
lockFlake(); in particular, the root node and the overriden
inputs. */
std::map<ref<Node>, SourcePath> nodePaths;

std::optional<Fingerprint> getFingerprint(ref<Store> store) const;
};

struct LockFlags

@@ -108,11 +113,11 @@ struct LockFlags

bool applyNixConfig = false;

/* Whether mutable flake references (i.e. those without a Git
/* Whether unlocked flake references (i.e. those without a Git
revision or similar) without a corresponding lock are
allowed. Mutable flake references with a lock are always
allowed. Unlocked flake references with a lock are always
allowed. */
bool allowMutable = true;
bool allowUnlocked = true;

/* Whether to commit changes to flake.lock. */
bool commitLockFile = false;

@@ -139,7 +144,7 @@ void callFlake(

void emitTreeAttrs(
EvalState & state,
const fetchers::Tree & tree,
const SourcePath & path,
const fetchers::Input & input,
Value & v,
bool emptyRevFallback = false,
@@ -92,6 +92,15 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(

std::smatch match;

auto fromParsedURL = [&](ParsedURL && parsedURL)
{
auto dir = getOr(parsedURL.query, "dir", "");
parsedURL.query.erase("dir");
std::string fragment;
std::swap(fragment, parsedURL.fragment);
return std::make_pair(FlakeRef(Input::fromURL(parsedURL), dir), fragment);
};

/* Check if 'url' is a flake ID. This is an abbreviated syntax for
'flake:<flake-id>?ref=<ref>&rev=<rev>'. */
@ -112,6 +121,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
|||
else if (std::regex_match(url, match, pathUrlRegex)) {
|
||||
std::string path = match[1];
|
||||
std::string fragment = percentDecode(match.str(3));
|
||||
auto query = decodeQuery(match[2]);
|
||||
|
||||
if (baseDir) {
|
||||
/* Check if 'url' is a path (either absolute or relative
|
||||
|
@ -163,7 +173,8 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
|||
.scheme = "git+file",
|
||||
.authority = "",
|
||||
.path = flakeRoot,
|
||||
.query = decodeQuery(match[2]),
|
||||
.query = query,
|
||||
.fragment = fragment,
|
||||
};
|
||||
|
||||
if (subdir != "") {
|
||||
|
@ -175,9 +186,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
|||
if (pathExists(flakeRoot + "/.git/shallow"))
|
||||
parsedURL.query.insert_or_assign("shallow", "1");
|
||||
|
||||
return std::make_pair(
|
||||
FlakeRef(Input::fromURL(parsedURL), getOr(parsedURL.query, "dir", "")),
|
||||
fragment);
|
||||
return fromParsedURL(std::move(parsedURL));
|
||||
}
|
||||
|
||||
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
|
||||
|
@ -188,29 +197,21 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
|||
} else {
|
||||
if (!hasPrefix(path, "/"))
|
||||
throw BadURL("flake reference '%s' is not an absolute path", url);
|
||||
auto query = decodeQuery(match[2]);
|
||||
path = canonPath(path + "/" + getOr(query, "dir", ""));
|
||||
}
|
||||
|
||||
fetchers::Attrs attrs;
|
||||
attrs.insert_or_assign("type", "path");
|
||||
attrs.insert_or_assign("path", path);
|
||||
|
||||
return std::make_pair(FlakeRef(Input::fromAttrs(std::move(attrs)), ""), fragment);
|
||||
return fromParsedURL({
|
||||
.url = path, // FIXME
|
||||
.base = path,
|
||||
.scheme = "path",
|
||||
.authority = "",
|
||||
.path = path,
|
||||
.query = query,
|
||||
.fragment = fragment
|
||||
});
|
||||
}
|
||||
|
||||
else {
|
||||
auto parsedURL = parseURL(url);
|
||||
std::string fragment;
|
||||
std::swap(fragment, parsedURL.fragment);
|
||||
|
||||
auto input = Input::fromURL(parsedURL);
|
||||
input.parent = baseDir;
|
||||
|
||||
return std::make_pair(
|
||||
FlakeRef(std::move(input), getOr(parsedURL.query, "dir", "")),
|
||||
fragment);
|
||||
}
|
||||
else
|
||||
return fromParsedURL(parseURL(url));
|
||||
}
|
||||
|
||||
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
|
||||
|
@ -232,10 +233,10 @@ FlakeRef FlakeRef::fromAttrs(const fetchers::Attrs & attrs)
|
|||
fetchers::maybeGetStrAttr(attrs, "dir").value_or(""));
|
||||
}
|
||||
|
||||
std::pair<fetchers::Tree, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
|
||||
std::pair<ref<InputAccessor>, FlakeRef> FlakeRef::lazyFetch(ref<Store> store) const
|
||||
{
|
||||
auto [tree, lockedInput] = input.fetch(store);
|
||||
return {std::move(tree), FlakeRef(std::move(lockedInput), subdir)};
|
||||
auto [accessor, lockedInput] = input.getAccessor(store);
|
||||
return {accessor, FlakeRef(std::move(lockedInput), subdir)};
|
||||
}
|
||||
|
||||
std::tuple<FlakeRef, std::string, OutputsSpec> parseFlakeRefWithFragmentAndOutputsSpec(
|
||||
|
|
|
@@ -35,7 +35,7 @@ typedef std::string FlakeId;

struct FlakeRef
{
/* fetcher-specific representation of the input, sufficient to
/* Fetcher-specific representation of the input, sufficient to
perform the fetch operation. */
fetchers::Input input;

@@ -57,7 +57,7 @@ struct FlakeRef

static FlakeRef fromAttrs(const fetchers::Attrs & attrs);

std::pair<fetchers::Tree, FlakeRef> fetchTree(ref<Store> store) const;
std::pair<ref<InputAccessor>, FlakeRef> lazyFetch(ref<Store> store) const;
};

std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef);
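For illustration only (not part of the diff): the 'dir' query parameter that fromParsedURL() strips off corresponds to flake references such as the following; the repository URL is hypothetical.

    # The flake lives in the 'subflake' subdirectory of the repository;
    # 'dir' selects it, and anything after '#' is the fragment that
    # parseFlakeRefWithFragment() splits off for the caller, e.g.
    #
    #   git+https://example.org/some/repo?dir=subflake#packages.x86_64-linux.default
    #
    # From the Nix language only the reference itself is used:
    builtins.getFlake "git+https://example.org/some/repo?dir=subflake"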
@ -31,30 +31,26 @@ FlakeRef getFlakeRef(
|
|||
}
|
||||
|
||||
LockedNode::LockedNode(const nlohmann::json & json)
|
||||
: lockedRef(getFlakeRef(json, "locked", "info"))
|
||||
: lockedRef(getFlakeRef(json, "locked", "info")) // FIXME: remove "info"
|
||||
, originalRef(getFlakeRef(json, "original", nullptr))
|
||||
, isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
|
||||
, parentPath(json.find("parent") != json.end() ? (std::optional<InputPath>) json["parent"] : std::nullopt)
|
||||
{
|
||||
if (!lockedRef.input.isLocked())
|
||||
throw Error("lock file contains mutable lock '%s'",
|
||||
if (!lockedRef.input.isLocked() && !lockedRef.input.isRelative())
|
||||
throw Error("lock file contains unlocked input '%s'",
|
||||
fetchers::attrsToJSON(lockedRef.input.toAttrs()));
|
||||
}
|
||||
|
||||
StorePath LockedNode::computeStorePath(Store & store) const
|
||||
{
|
||||
return lockedRef.input.computeStorePath(store);
|
||||
}
|
||||
|
||||
std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
|
||||
{
|
||||
auto pos = root;
|
||||
std::shared_ptr<Node> pos = root;
|
||||
|
||||
if (!pos) return {};
|
||||
|
||||
for (auto & elem : path) {
|
||||
if (auto i = get(pos->inputs, elem)) {
|
||||
if (auto node = std::get_if<0>(&*i))
|
||||
pos = *node;
|
||||
pos = (std::shared_ptr<LockedNode>) *node;
|
||||
else if (auto follows = std::get_if<1>(&*i)) {
|
||||
pos = findInput(*follows);
|
||||
if (!pos) return {};
|
||||
|
@ -66,13 +62,15 @@ std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
|
|||
return pos;
|
||||
}
|
||||
|
||||
LockFile::LockFile(const nlohmann::json & json, const Path & path)
|
||||
LockFile::LockFile(std::string_view contents, std::string_view path)
|
||||
{
|
||||
auto json = nlohmann::json::parse(contents);
|
||||
|
||||
auto version = json.value("version", 0);
|
||||
if (version < 5 || version > 7)
|
||||
throw Error("lock file '%s' has unsupported version %d", path, version);
|
||||
|
||||
std::unordered_map<std::string, std::shared_ptr<Node>> nodeMap;
|
||||
std::map<std::string, ref<Node>> nodeMap;
|
||||
|
||||
std::function<void(Node & node, const nlohmann::json & jsonNode)> getInputs;
|
||||
|
||||
|
@ -93,12 +91,12 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
|
|||
auto jsonNode2 = nodes.find(inputKey);
|
||||
if (jsonNode2 == nodes.end())
|
||||
throw Error("lock file references missing node '%s'", inputKey);
|
||||
auto input = std::make_shared<LockedNode>(*jsonNode2);
|
||||
auto input = make_ref<LockedNode>(*jsonNode2);
|
||||
k = nodeMap.insert_or_assign(inputKey, input).first;
|
||||
getInputs(*input, *jsonNode2);
|
||||
}
|
||||
if (auto child = std::dynamic_pointer_cast<LockedNode>(k->second))
|
||||
node.inputs.insert_or_assign(i.key(), child);
|
||||
if (auto child = k->second.dynamic_pointer_cast<LockedNode>())
|
||||
node.inputs.insert_or_assign(i.key(), ref(child));
|
||||
else
|
||||
// FIXME: replace by follows node
|
||||
throw Error("lock file contains cycle to root node");
|
||||
|
@ -116,15 +114,15 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
|
|||
// a bit since we don't need to worry about cycles.
|
||||
}
|
||||
|
||||
nlohmann::json LockFile::toJSON() const
|
||||
std::pair<nlohmann::json, LockFile::KeyMap> LockFile::toJSON() const
|
||||
{
|
||||
nlohmann::json nodes;
|
||||
std::unordered_map<std::shared_ptr<const Node>, std::string> nodeKeys;
|
||||
KeyMap nodeKeys;
|
||||
std::unordered_set<std::string> keys;
|
||||
|
||||
std::function<std::string(const std::string & key, std::shared_ptr<const Node> node)> dumpNode;
|
||||
std::function<std::string(const std::string & key, ref<const Node> node)> dumpNode;
|
||||
|
||||
dumpNode = [&](std::string key, std::shared_ptr<const Node> node) -> std::string
|
||||
dumpNode = [&](std::string key, ref<const Node> node) -> std::string
|
||||
{
|
||||
auto k = nodeKeys.find(node);
|
||||
if (k != nodeKeys.end())
|
||||
|
@ -159,10 +157,13 @@ nlohmann::json LockFile::toJSON() const
|
|||
n["inputs"] = std::move(inputs);
|
||||
}
|
||||
|
||||
if (auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(node)) {
|
||||
if (auto lockedNode = node.dynamic_pointer_cast<const LockedNode>()) {
|
||||
n["original"] = fetchers::attrsToJSON(lockedNode->originalRef.toAttrs());
|
||||
n["locked"] = fetchers::attrsToJSON(lockedNode->lockedRef.toAttrs());
|
||||
if (!lockedNode->isFlake) n["flake"] = false;
|
||||
if (!lockedNode->isFlake)
|
||||
n["flake"] = false;
|
||||
if (lockedNode->parentPath)
|
||||
n["parent"] = *lockedNode->parentPath;
|
||||
}
|
||||
|
||||
nodes[key] = std::move(n);
|
||||
|
@ -175,39 +176,28 @@ nlohmann::json LockFile::toJSON() const
|
|||
json["root"] = dumpNode("root", root);
|
||||
json["nodes"] = std::move(nodes);
|
||||
|
||||
return json;
|
||||
return {json, std::move(nodeKeys)};
|
||||
}
|
||||
|
||||
std::string LockFile::to_string() const
|
||||
std::pair<std::string, LockFile::KeyMap> LockFile::to_string() const
|
||||
{
|
||||
return toJSON().dump(2);
|
||||
}
|
||||
|
||||
LockFile LockFile::read(const Path & path)
|
||||
{
|
||||
if (!pathExists(path)) return LockFile();
|
||||
return LockFile(nlohmann::json::parse(readFile(path)), path);
|
||||
auto [json, nodeKeys] = toJSON();
|
||||
return {json.dump(2), std::move(nodeKeys)};
|
||||
}
|
||||
|
||||
std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile)
|
||||
{
|
||||
stream << lockFile.toJSON().dump(2);
|
||||
stream << lockFile.toJSON().first.dump(2);
|
||||
return stream;
|
||||
}
|
||||
|
||||
void LockFile::write(const Path & path) const
|
||||
std::optional<FlakeRef> LockFile::isUnlocked() const
|
||||
{
|
||||
createDirs(dirOf(path));
|
||||
writeFile(path, fmt("%s\n", *this));
|
||||
}
|
||||
std::set<ref<const Node>> nodes;
|
||||
|
||||
bool LockFile::isImmutable() const
|
||||
{
|
||||
std::unordered_set<std::shared_ptr<const Node>> nodes;
|
||||
std::function<void(ref<const Node> node)> visit;
|
||||
|
||||
std::function<void(std::shared_ptr<const Node> node)> visit;
|
||||
|
||||
visit = [&](std::shared_ptr<const Node> node)
|
||||
visit = [&](ref<const Node> node)
|
||||
{
|
||||
if (!nodes.insert(node).second) return;
|
||||
for (auto & i : node->inputs)
|
||||
|
@ -219,17 +209,20 @@ bool LockFile::isImmutable() const
|
|||
|
||||
for (auto & i : nodes) {
|
||||
if (i == root) continue;
|
||||
auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(i);
|
||||
if (lockedNode && !lockedNode->lockedRef.input.isLocked()) return false;
|
||||
auto node = i.dynamic_pointer_cast<const LockedNode>();
|
||||
if (node
|
||||
&& !node->lockedRef.input.isLocked()
|
||||
&& !node->lockedRef.input.isRelative())
|
||||
return node->lockedRef;
|
||||
}
|
||||
|
||||
return true;
|
||||
return {};
|
||||
}
|
||||
|
||||
bool LockFile::operator ==(const LockFile & other) const
|
||||
{
|
||||
// FIXME: slow
|
||||
return toJSON() == other.toJSON();
|
||||
return toJSON().first == other.toJSON().first;
|
||||
}
|
||||
|
||||
InputPath parseInputPath(std::string_view s)
|
||||
|
@ -247,12 +240,12 @@ InputPath parseInputPath(std::string_view s)
|
|||
|
||||
std::map<InputPath, Node::Edge> LockFile::getAllInputs() const
|
||||
{
|
||||
std::unordered_set<std::shared_ptr<Node>> done;
|
||||
std::set<ref<Node>> done;
|
||||
std::map<InputPath, Node::Edge> res;
|
||||
|
||||
std::function<void(const InputPath & prefix, std::shared_ptr<Node> node)> recurse;
|
||||
std::function<void(const InputPath & prefix, ref<Node> node)> recurse;
|
||||
|
||||
recurse = [&](const InputPath & prefix, std::shared_ptr<Node> node)
|
||||
recurse = [&](const InputPath & prefix, ref<Node> node)
|
||||
{
|
||||
if (!done.insert(node).second) return;
|
||||
|
||||
|
|
|
@@ -20,7 +20,7 @@ struct LockedNode;
type LockedNode. */
struct Node : std::enable_shared_from_this<Node>
{
typedef std::variant<std::shared_ptr<LockedNode>, InputPath> Edge;
typedef std::variant<ref<LockedNode>, InputPath> Edge;

std::map<FlakeId, Edge> inputs;

@@ -33,34 +33,37 @@ struct LockedNode
FlakeRef lockedRef, originalRef;
bool isFlake = true;

/* The node relative to which relative source paths
(e.g. 'path:../foo') are interpreted. */
std::optional<InputPath> parentPath;

LockedNode(
const FlakeRef & lockedRef,
const FlakeRef & originalRef,
bool isFlake = true)
: lockedRef(lockedRef), originalRef(originalRef), isFlake(isFlake)
bool isFlake = true,
std::optional<InputPath> parentPath = {})
: lockedRef(lockedRef), originalRef(originalRef), isFlake(isFlake), parentPath(parentPath)
{ }

LockedNode(const nlohmann::json & json);

StorePath computeStorePath(Store & store) const;
};

struct LockFile
{
std::shared_ptr<Node> root = std::make_shared<Node>();
ref<Node> root = make_ref<Node>();

LockFile() {};
LockFile(const nlohmann::json & json, const Path & path);
LockFile(std::string_view contents, std::string_view path);

nlohmann::json toJSON() const;
typedef std::map<ref<const Node>, std::string> KeyMap;

std::string to_string() const;
std::pair<nlohmann::json, KeyMap> toJSON() const;

static LockFile read(const Path & path);
std::pair<std::string, KeyMap> to_string() const;

void write(const Path & path) const;

bool isImmutable() const;
/* Check whether this lock file has any unlocked inputs. If so,
return one. */
std::optional<FlakeRef> isUnlocked() const;

bool operator ==(const LockFile & other) const;
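A rough sketch (not from the commit) of a version-7 lock-file node carrying the new 'parent' field, written as the Nix value that builtins.fromJSON lockFileStr would yield; all names and paths are invented, and the exact attributes may differ.

    {
      nodes.build-helpers = {
        # Relative path input: it stays unlocked, so the node records which
        # node its 'path' is relative to instead of a pinned revision.
        locked = { type = "path"; path = "./nix/build-helpers"; };
        original = { type = "path"; path = "./nix/build-helpers"; };
        parent = [ ];       # InputPath of the parent node (here: the root)
      };
      nodes.root.inputs.build-helpers = "build-helpers";
      root = "root";
      version = 7;
    }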
@@ -150,7 +150,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
/* Check for `meta.outputsToInstall` and return `outputs` reduced to that. */
const Value * outTI = queryMeta("outputsToInstall");
if (!outTI) return outputs;
const auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'");
auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'");
/* ^ this shows during `nix-env -i` right under the bad derivation */
if (!outTI->isList()) throw errMsg;
Outputs result;
@ -8,6 +8,65 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
struct SourcePathAdapter : AbstractPos
|
||||
{
|
||||
SourcePath path;
|
||||
|
||||
SourcePathAdapter(SourcePath path)
|
||||
: path(std::move(path))
|
||||
{
|
||||
}
|
||||
|
||||
std::optional<std::string> getSource() const override
|
||||
{
|
||||
try {
|
||||
return path.readFile();
|
||||
} catch (Error &) {
|
||||
return std::nullopt;
|
||||
}
|
||||
}
|
||||
|
||||
void print(std::ostream & out) const override
|
||||
{
|
||||
out << path;
|
||||
}
|
||||
};
|
||||
|
||||
struct StringPosAdapter : AbstractPos
|
||||
{
|
||||
void print(std::ostream & out) const override
|
||||
{
|
||||
out << "«string»";
|
||||
}
|
||||
};
|
||||
|
||||
struct StdinPosAdapter : AbstractPos
|
||||
{
|
||||
void print(std::ostream & out) const override
|
||||
{
|
||||
out << "«stdin»";
|
||||
}
|
||||
};
|
||||
|
||||
Pos::operator std::shared_ptr<AbstractPos>() const
|
||||
{
|
||||
std::shared_ptr<AbstractPos> pos;
|
||||
|
||||
if (auto path = std::get_if<SourcePath>(&origin))
|
||||
pos = std::make_shared<SourcePathAdapter>(*path);
|
||||
else if (std::get_if<stdin_tag>(&origin))
|
||||
pos = std::make_shared<StdinPosAdapter>();
|
||||
else if (std::get_if<string_tag>(&origin))
|
||||
pos = std::make_shared<StringPosAdapter>();
|
||||
|
||||
if (pos) {
|
||||
pos->line = line;
|
||||
pos->column = column;
|
||||
}
|
||||
|
||||
return pos;
|
||||
}
|
||||
|
||||
/* Displaying abstract syntax trees. */
|
||||
|
||||
static void showString(std::ostream & str, std::string_view s)
|
||||
|
@ -71,7 +130,7 @@ void ExprString::show(const SymbolTable & symbols, std::ostream & str) const
|
|||
|
||||
void ExprPath::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
{
|
||||
str << s;
|
||||
str << path;
|
||||
}
|
||||
|
||||
void ExprVar::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
|
@ -248,24 +307,10 @@ void ExprPos::show(const SymbolTable & symbols, std::ostream & str) const
|
|||
|
||||
std::ostream & operator << (std::ostream & str, const Pos & pos)
|
||||
{
|
||||
if (!pos)
|
||||
if (auto pos2 = (std::shared_ptr<AbstractPos>) pos) {
|
||||
str << *pos2;
|
||||
} else
|
||||
str << "undefined position";
|
||||
else
|
||||
{
|
||||
auto f = format(ANSI_BOLD "%1%" ANSI_NORMAL ":%2%:%3%");
|
||||
switch (pos.origin) {
|
||||
case foFile:
|
||||
f % (const std::string &) pos.file;
|
||||
break;
|
||||
case foStdin:
|
||||
case foString:
|
||||
f % "(string)";
|
||||
break;
|
||||
default:
|
||||
throw Error("unhandled Pos origin!");
|
||||
}
|
||||
str << (f % pos.line % pos.column).str();
|
||||
}
|
||||
|
||||
return str;
|
||||
}
|
||||
|
@ -289,7 +334,6 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
|
|||
}
|
||||
|
||||
|
||||
|
||||
/* Computing levels/displacements for variables. */
|
||||
|
||||
void Expr::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
|
||||
|
|
|
@@ -20,18 +20,23 @@ MakeError(Abort, EvalError);
MakeError(TypeError, EvalError);
MakeError(UndefinedVarError, Error);
MakeError(MissingArgumentError, EvalError);
MakeError(RestrictedPathError, Error);

/* Position objects. */

struct Pos
{
std::string file;
FileOrigin origin;
uint32_t line;
uint32_t column;

struct stdin_tag {};
struct string_tag {};

typedef std::variant<stdin_tag, string_tag, SourcePath> Origin;

Origin origin;

explicit operator bool() const { return line > 0; }

operator std::shared_ptr<AbstractPos>() const;
};

class PosIdx {
@ -47,7 +52,11 @@ public:
|
|||
|
||||
explicit operator bool() const { return id > 0; }
|
||||
|
||||
bool operator<(const PosIdx other) const { return id < other.id; }
|
||||
bool operator <(const PosIdx other) const { return id < other.id; }
|
||||
|
||||
bool operator ==(const PosIdx other) const { return id == other.id; }
|
||||
|
||||
bool operator !=(const PosIdx other) const { return id != other.id; }
|
||||
};
|
||||
|
||||
class PosTable
|
||||
|
@ -61,13 +70,13 @@ public:
|
|||
// current origins.back() can be reused or not.
|
||||
mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
|
||||
|
||||
explicit Origin(uint32_t idx): idx(idx), file{}, origin{} {}
|
||||
// Used for searching in PosTable::[].
|
||||
explicit Origin(uint32_t idx): idx(idx), origin{Pos::stdin_tag()} {}
|
||||
|
||||
public:
|
||||
const std::string file;
|
||||
const FileOrigin origin;
|
||||
const Pos::Origin origin;
|
||||
|
||||
Origin(std::string file, FileOrigin origin): file(std::move(file)), origin(origin) {}
|
||||
Origin(Pos::Origin origin): origin(origin) {}
|
||||
};
|
||||
|
||||
struct Offset {
|
||||
|
@ -107,7 +116,7 @@ public:
|
|||
[] (const auto & a, const auto & b) { return a.idx < b.idx; });
|
||||
const auto origin = *std::prev(pastOrigin);
|
||||
const auto offset = offsets[idx];
|
||||
return {origin.file, origin.origin, offset.line, offset.column};
|
||||
return {offset.line, offset.column, origin.origin};
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -183,9 +192,13 @@ struct ExprString : Expr
|
|||
|
||||
struct ExprPath : Expr
|
||||
{
|
||||
std::string s;
|
||||
const SourcePath path;
|
||||
Value v;
|
||||
ExprPath(std::string s) : s(std::move(s)) { v.mkPath(this->s.c_str()); };
|
||||
ExprPath(SourcePath && _path)
|
||||
: path(_path)
|
||||
{
|
||||
v.mkPath(&*path.accessor, path.path.abs().data());
|
||||
}
|
||||
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
|
|
@ -31,14 +31,9 @@ namespace nix {
|
|||
EvalState & state;
|
||||
SymbolTable & symbols;
|
||||
Expr * result;
|
||||
Path basePath;
|
||||
SourcePath basePath;
|
||||
PosTable::Origin origin;
|
||||
std::optional<ErrorInfo> error;
|
||||
ParseData(EvalState & state, PosTable::Origin origin)
|
||||
: state(state)
|
||||
, symbols(state.symbols)
|
||||
, origin(std::move(origin))
|
||||
{ };
|
||||
};
|
||||
|
||||
struct ParserFormals {
|
||||
|
@ -515,11 +510,8 @@ string_parts_interpolated
|
|||
|
||||
path_start
|
||||
: PATH {
|
||||
Path path(absPath({$1.p, $1.l}, data->basePath));
|
||||
/* add back in the trailing '/' to the first segment */
|
||||
if ($1.p[$1.l-1] == '/' && $1.l > 1)
|
||||
path += "/";
|
||||
$$ = new ExprPath(path);
|
||||
SourcePath path { data->basePath.accessor, CanonPath({$1.p, $1.l}, data->basePath.path) };
|
||||
$$ = new ExprPath(std::move(path));
|
||||
}
|
||||
| HPATH {
|
||||
if (evalSettings.pureEval) {
|
||||
|
@ -529,7 +521,7 @@ path_start
|
|||
);
|
||||
}
|
||||
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
|
||||
$$ = new ExprPath(path);
|
||||
$$ = new ExprPath(data->state.rootPath(path));
|
||||
}
|
||||
;
|
||||
|
||||
|
@ -644,30 +636,29 @@ formal
|
|||
#include "eval.hh"
|
||||
#include "filetransfer.hh"
|
||||
#include "fetchers.hh"
|
||||
#include "fs-input-accessor.hh"
|
||||
#include "tarball.hh"
|
||||
#include "store-api.hh"
|
||||
#include "flake/flake.hh"
|
||||
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
||||
Expr * EvalState::parse(char * text, size_t length, FileOrigin origin,
|
||||
const PathView path, const PathView basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
Expr * EvalState::parse(
|
||||
char * text,
|
||||
size_t length,
|
||||
Pos::Origin origin,
|
||||
const SourcePath & basePath,
|
||||
std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
yyscan_t scanner;
|
||||
std::string file;
|
||||
switch (origin) {
|
||||
case foFile:
|
||||
file = path;
|
||||
break;
|
||||
case foStdin:
|
||||
case foString:
|
||||
file = text;
|
||||
break;
|
||||
default:
|
||||
assert(false);
|
||||
}
|
||||
ParseData data(*this, {file, origin});
|
||||
data.basePath = basePath;
|
||||
ParseData data {
|
||||
.state = *this,
|
||||
.symbols = symbols,
|
||||
.basePath = basePath,
|
||||
.origin = {origin},
|
||||
};
|
||||
|
||||
yylex_init(&scanner);
|
||||
yy_scan_buffer(text, length, scanner);
|
||||
|
@ -682,55 +673,43 @@ Expr * EvalState::parse(char * text, size_t length, FileOrigin origin,
|
|||
}
|
||||
|
||||
|
||||
Path resolveExprPath(Path path)
|
||||
SourcePath resolveExprPath(const SourcePath & path)
|
||||
{
|
||||
assert(path[0] == '/');
|
||||
|
||||
unsigned int followCount = 0, maxFollow = 1024;
|
||||
|
||||
/* If `path' is a symlink, follow it. This is so that relative
|
||||
path references work. */
|
||||
struct stat st;
|
||||
while (true) {
|
||||
// Basic cycle/depth limit to avoid infinite loops.
|
||||
if (++followCount >= maxFollow)
|
||||
throw Error("too many symbolic links encountered while traversing the path '%s'", path);
|
||||
st = lstat(path);
|
||||
if (!S_ISLNK(st.st_mode)) break;
|
||||
path = absPath(readLink(path), dirOf(path));
|
||||
}
|
||||
auto path2 = path.resolveSymlinks();
|
||||
|
||||
/* If `path' refers to a directory, append `/default.nix'. */
|
||||
if (S_ISDIR(st.st_mode))
|
||||
path = canonPath(path + "/default.nix");
|
||||
if (path2.lstat().type == InputAccessor::tDirectory)
|
||||
return path2 + "default.nix";
|
||||
|
||||
return path;
|
||||
return path2;
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromFile(const Path & path)
|
||||
Expr * EvalState::parseExprFromFile(const SourcePath & path)
|
||||
{
|
||||
return parseExprFromFile(path, staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
auto buffer = readFile(path);
|
||||
// readFile should have left some extra space for terminators
|
||||
auto buffer = path.readFile();
|
||||
// readFile hopefully has left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
return parse(buffer.data(), buffer.size(), foFile, path, dirOf(path), staticEnv);
|
||||
return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string s, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
s.append("\0\0", 2);
|
||||
return parse(s.data(), s.size(), foString, "", basePath, staticEnv);
|
||||
return parse(s.data(), s.size(), Pos::string_tag(), basePath, staticEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string s, const Path & basePath)
|
||||
Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath)
|
||||
{
|
||||
return parseExprFromString(std::move(s), basePath, staticBaseEnv);
|
||||
}
|
||||
|
@ -742,7 +721,7 @@ Expr * EvalState::parseStdin()
|
|||
auto buffer = drainFD(0);
|
||||
// drainFD should have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
return parse(buffer.data(), buffer.size(), foStdin, "", absPath("."), staticBaseEnv);
|
||||
return parse(buffer.data(), buffer.size(), Pos::stdin_tag(), rootPath(absPath(".")), staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
|
@ -762,13 +741,13 @@ void EvalState::addToSearchPath(const std::string & s)
|
|||
}
|
||||
|
||||
|
||||
Path EvalState::findFile(const std::string_view path)
|
||||
SourcePath EvalState::findFile(const std::string_view path)
|
||||
{
|
||||
return findFile(searchPath, path);
|
||||
}
|
||||
|
||||
|
||||
Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos)
|
||||
SourcePath EvalState::findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos)
|
||||
{
|
||||
for (auto & i : searchPath) {
|
||||
std::string suffix;
|
||||
|
@ -781,14 +760,14 @@ Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, c
|
|||
continue;
|
||||
suffix = path.size() == s ? "" : concatStrings("/", path.substr(s));
|
||||
}
|
||||
auto r = resolveSearchPathElem(i);
|
||||
if (!r.first) continue;
|
||||
Path res = r.second + suffix;
|
||||
if (pathExists(res)) return canonPath(res);
|
||||
if (auto path = resolveSearchPathElem(i)) {
|
||||
auto res = *path + CanonPath(suffix);
|
||||
if (res.pathExists()) return res;
|
||||
}
|
||||
}
|
||||
|
||||
if (hasPrefix(path, "nix/"))
|
||||
return concatStrings(corepkgsPrefix, path.substr(4));
|
||||
return {corepkgsFS, CanonPath(path.substr(3))};
|
||||
|
||||
debugThrowLastTrace(ThrownError({
|
||||
.msg = hintfmt(evalSettings.pureEval
|
||||
|
@ -800,38 +779,63 @@ Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, c
|
|||
}
|
||||
|
||||
|
||||
std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathElem & elem)
|
||||
std::optional<SourcePath> EvalState::resolveSearchPathElem(const SearchPathElem & elem, bool initAccessControl)
|
||||
{
|
||||
auto i = searchPathResolved.find(elem.second);
|
||||
if (i != searchPathResolved.end()) return i->second;
|
||||
|
||||
std::pair<bool, std::string> res;
|
||||
std::optional<SourcePath> res;
|
||||
|
||||
if (isUri(elem.second)) {
|
||||
if (EvalSettings::isPseudoUrl(elem.second)) {
|
||||
try {
|
||||
res = { true, store->toRealPath(fetchers::downloadTarball(
|
||||
store, resolveUri(elem.second), "source", false).first.storePath) };
|
||||
auto storePath = fetchers::downloadTarball(
|
||||
store, EvalSettings::resolvePseudoUrl(elem.second), "source", false).first;
|
||||
auto accessor = makeStorePathAccessor(store, storePath);
|
||||
registerAccessor(accessor);
|
||||
res.emplace(accessor->root());
|
||||
} catch (FileTransferError & e) {
|
||||
logWarning({
|
||||
.msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
|
||||
});
|
||||
res = { false, "" };
|
||||
}
|
||||
} else {
|
||||
auto path = absPath(elem.second);
|
||||
if (pathExists(path))
|
||||
res = { true, path };
|
||||
}
|
||||
|
||||
else if (hasPrefix(elem.second, "flake:")) {
|
||||
auto flakeRef = parseFlakeRef(elem.second.substr(6), {}, true, false);
|
||||
debug("fetching flake search path element '%s''", elem.second);
|
||||
auto [accessor, _] = flakeRef.resolve(store).lazyFetch(store);
|
||||
res.emplace(accessor->root());
|
||||
}
|
||||
|
||||
else {
|
||||
auto path = rootPath(absPath(elem.second));
|
||||
|
||||
/* Allow access to paths in the search path. */
|
||||
if (initAccessControl) {
|
||||
allowPath(path.path.abs());
|
||||
if (store->isInStore(path.path.abs())) {
|
||||
try {
|
||||
StorePathSet closure;
|
||||
store->computeFSClosure(store->toStorePath(path.path.abs()).first, closure);
|
||||
for (auto & p : closure)
|
||||
allowPath(p);
|
||||
} catch (InvalidPath &) { }
|
||||
}
|
||||
}
|
||||
|
||||
if (path.pathExists())
|
||||
res.emplace(path);
|
||||
else {
|
||||
logWarning({
|
||||
.msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", elem.second)
|
||||
});
|
||||
res = { false, "" };
|
||||
}
|
||||
}
|
||||
|
||||
debug(format("resolved search path element '%s' to '%s'") % elem.second % res.second);
|
||||
if (res)
|
||||
debug("resolved search path element '%s' to '%s'", elem.second, *res);
|
||||
|
||||
searchPathResolved[elem.second] = res;
|
||||
searchPathResolved.emplace(elem.second, res);
|
||||
return res;
|
||||
}
|
||||
|
||||
|
|
src/libexpr/paths.cc (new file, 96 lines)
@@ -0,0 +1,96 @@
#include "eval.hh"
#include "util.hh"
#include "fs-input-accessor.hh"

namespace nix {

SourcePath EvalState::rootPath(const Path & path)
{
    return {rootFS, CanonPath(path)};
}

void EvalState::registerAccessor(ref<InputAccessor> accessor)
{
    inputAccessors.emplace(accessor->number, accessor);
}

static constexpr std::string_view marker = "/__virtual__/";

std::string EvalState::encodePath(const SourcePath & path)
{
    /* For backward compatibility, return paths in the root FS
       normally. Encoding any other path is not very reproducible (due
       to /__virtual__/<N>) and we should deprecate it eventually. So
       print a warning about use of an encoded path in
       decodePath(). */
    return path.accessor == rootFS
        ? path.path.abs()
        : std::string(marker) + std::to_string(path.accessor->number) + path.path.abs();
}

SourcePath EvalState::decodePath(std::string_view s, PosIdx pos)
{
    if (!hasPrefix(s, "/"))
        throwEvalError(pos, "string '%1%' doesn't represent an absolute path", s);

    if (hasPrefix(s, marker)) {
        auto fail = [s]() {
            throw Error("cannot decode virtual path '%s'", s);
        };

        s = s.substr(marker.size());

        try {
            auto slash = s.find('/');
            if (slash == std::string::npos) fail();
            size_t number = std::stoi(std::string(s, 0, slash));
            s = s.substr(slash);

            auto accessor = inputAccessors.find(number);
            if (accessor == inputAccessors.end()) fail();

            SourcePath path {accessor->second, CanonPath(s)};

            static bool warned = false;
            warnOnce(warned, "applying 'toString' to path '%s' and then accessing it is deprecated, at %s", path, positions[pos]);

            return path;
        } catch (std::invalid_argument & e) {
            fail();
            abort();
        }
    } else
        return {rootFS, CanonPath(s)};
}

std::string EvalState::decodePaths(std::string_view s)
{
    std::string res;

    size_t pos = 0;

    while (true) {
        auto m = s.find(marker, pos);
        if (m == s.npos) {
            res.append(s.substr(pos));
            return res;
        }

        res.append(s.substr(pos, m - pos));

        auto end = s.find_first_of(" \n\r\t'\"’:", m);
        if (end == s.npos) end = s.size();

        try {
            auto path = decodePath(s.substr(m, end - m), noPos);
            res.append(path.to_string());
        } catch (...) {
            throw;
            res.append(s.substr(pos, end - m));
        }

        pos = end;
    }
}

}
|
|
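A minimal standalone sketch (not part of this commit) of the "/__virtual__/<N>" string format that `encodePath()`/`decodePath()` above produce and parse. It uses only the standard library; the function and variable names (`encode`, `decode`, `accessorNumber`) are invented for the example.

```cpp
// Standalone illustration (plain C++, no Nix dependencies) of the
// "/__virtual__/<N><path>" encoding used by encodePath()/decodePath() above.
#include <iostream>
#include <optional>
#include <string>
#include <string_view>
#include <utility>

static constexpr std::string_view marker = "/__virtual__/";

// Paths in the root filesystem are returned unchanged; any other accessor
// gets the marker plus its accessor number prepended.
std::string encode(unsigned int accessorNumber, const std::string & path, bool isRootFS)
{
    return isRootFS
        ? path
        : std::string(marker) + std::to_string(accessorNumber) + path;
}

// Split an encoded string back into (accessor number, path inside the accessor).
std::optional<std::pair<unsigned int, std::string>> decode(std::string_view s)
{
    if (s.substr(0, marker.size()) != marker) return std::nullopt;
    s = s.substr(marker.size());
    auto slash = s.find('/');
    if (slash == std::string_view::npos) return std::nullopt;
    auto number = (unsigned int) std::stoul(std::string(s.substr(0, slash)));
    return std::make_pair(number, std::string(s.substr(slash)));
}

int main()
{
    auto encoded = encode(3, "/flake.nix", false);
    std::cout << encoded << "\n"; // prints /__virtual__/3/flake.nix
    if (auto p = decode(encoded))
        std::cout << p->first << " " << p->second << "\n"; // prints 3 /flake.nix
}
```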
@ -11,6 +11,7 @@
|
|||
#include "value-to-json.hh"
|
||||
#include "value-to-xml.hh"
|
||||
#include "primops.hh"
|
||||
#include "fs-input-accessor.hh"
|
||||
|
||||
#include <boost/container/small_vector.hpp>
|
||||
|
||||
|
@ -83,7 +84,7 @@ StringMap EvalState::realiseContext(const PathSet & context)
|
|||
|
||||
/* Add the output of this derivations to the allowed
|
||||
paths. */
|
||||
if (allowedPaths) {
|
||||
if (rootFS->hasAccessControl()) {
|
||||
for (auto & [_placeholder, outputPath] : res) {
|
||||
allowPath(store->toRealPath(outputPath));
|
||||
}
|
||||
|
@ -92,12 +93,13 @@ StringMap EvalState::realiseContext(const PathSet & context)
|
|||
return res;
|
||||
}
|
||||
|
||||
// FIXME: remove?
|
||||
struct RealisePathFlags {
|
||||
// Whether to check that the path is allowed in pure eval mode
|
||||
bool checkForPureEval = true;
|
||||
};
|
||||
|
||||
static Path realisePath(EvalState & state, const PosIdx pos, Value & v, const RealisePathFlags flags = {})
|
||||
static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, const RealisePathFlags flags = {})
|
||||
{
|
||||
PathSet context;
|
||||
|
||||
|
@ -112,13 +114,12 @@ static Path realisePath(EvalState & state, const PosIdx pos, Value & v, const Re
|
|||
}();
|
||||
|
||||
try {
|
||||
StringMap rewrites = state.realiseContext(context);
|
||||
|
||||
auto realPath = state.toRealPath(rewriteStrings(path, rewrites), context);
|
||||
|
||||
return flags.checkForPureEval
|
||||
? state.checkSourcePath(realPath)
|
||||
: realPath;
|
||||
if (!context.empty()) {
|
||||
auto rewrites = state.realiseContext(context);
|
||||
auto realPath = state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context);
|
||||
return {path.accessor, CanonPath(realPath)};
|
||||
} else
|
||||
return path;
|
||||
} catch (Error & e) {
|
||||
e.addTrace(state.positions[pos], "while realising the context of path '%s'", path);
|
||||
throw;
|
||||
|
@ -162,6 +163,7 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
|
|||
{
|
||||
auto path = realisePath(state, pos, vPath);
|
||||
|
||||
#if 0
|
||||
// FIXME
|
||||
auto isValidDerivationInStore = [&]() -> std::optional<StorePath> {
|
||||
if (!state.store->isStorePath(path))
|
||||
|
@ -201,13 +203,9 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
|
|||
state.forceAttrs(v, pos);
|
||||
}
|
||||
|
||||
else if (path == corepkgsPrefix + "fetchurl.nix") {
|
||||
state.eval(state.parseExprFromString(
|
||||
#include "fetchurl.nix.gen.hh"
|
||||
, "/"), v);
|
||||
}
|
||||
|
||||
else {
|
||||
else
|
||||
#endif
|
||||
{
|
||||
if (!vScope)
|
||||
state.evalFile(path, v);
|
||||
else {
|
||||
|
@ -313,6 +311,9 @@ extern "C" typedef void (*ValueInitializer)(EvalState & state, Value & v);
|
|||
/* Load a ValueInitializer from a DSO and return whatever it initializes */
|
||||
void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
throw UnimplementedError("importNative");
|
||||
|
||||
#if 0
|
||||
auto path = realisePath(state, pos, *args[0]);
|
||||
|
||||
std::string sym(state.forceStringNoCtx(*args[1], pos));
|
||||
|
@ -334,6 +335,7 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu
|
|||
(func)(state, v);
|
||||
|
||||
/* We don't dlclose because v may be a primop referencing a function in the shared object file */
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
|
@ -367,8 +369,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
|||
auto output = runProgram(program, true, commandArgs);
|
||||
Expr * parsed;
|
||||
try {
|
||||
auto base = state.positions[pos];
|
||||
parsed = state.parseExprFromString(std::move(output), base.file);
|
||||
parsed = state.parseExprFromString(std::move(output), state.rootPath("/"));
|
||||
} catch (Error & e) {
|
||||
e.addTrace(state.positions[pos], "While parsing the output from '%1%'", program);
|
||||
throw;
|
||||
|
@ -554,7 +555,8 @@ struct CompareValues
|
|||
case nString:
|
||||
return strcmp(v1->string.s, v2->string.s) < 0;
|
||||
case nPath:
|
||||
return strcmp(v1->path, v2->path) < 0;
|
||||
// FIXME: handle accessor?
|
||||
return strcmp(v1->_path.path, v2->_path.path) < 0;
|
||||
case nList:
|
||||
// Lexicographic comparison
|
||||
for (size_t i = 0;; i++) {
|
||||
|
@ -767,7 +769,7 @@ static RegisterPrimOp primop_abort({
|
|||
.fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
auto s = state.coerceToString(pos, *args[0], context).toOwned();
|
||||
auto s = state.decodePaths(*state.coerceToString(pos, *args[0], context));
|
||||
state.debugThrowLastTrace(Abort("evaluation aborted with the following error message: '%1%'", s));
|
||||
}
|
||||
});
|
||||
|
@ -785,7 +787,7 @@ static RegisterPrimOp primop_throw({
|
|||
.fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
auto s = state.coerceToString(pos, *args[0], context).toOwned();
|
||||
auto s = state.decodePaths(*state.coerceToString(pos, *args[0], context));
|
||||
state.debugThrowLastTrace(ThrownError(s));
|
||||
}
|
||||
});
|
||||
|
@ -797,7 +799,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
|
|||
v = *args[1];
|
||||
} catch (Error & e) {
|
||||
PathSet context;
|
||||
e.addTrace(std::nullopt, state.coerceToString(pos, *args[0], context).toOwned());
|
||||
e.addTrace(nullptr, state.decodePaths(*state.coerceToString(pos, *args[0], context)));
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
@ -1388,12 +1390,13 @@ static RegisterPrimOp primop_placeholder({
|
|||
*************************************************************/
|
||||
|
||||
|
||||
/* Convert the argument to a path. !!! obsolete? */
|
||||
/* Convert the argument to a path and then to a string (confusing,
|
||||
eh?). !!! obsolete? */
|
||||
static void prim_toPath(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
Path path = state.coerceToPath(pos, *args[0], context);
|
||||
v.mkString(canonPath(path), context);
|
||||
auto path = state.coerceToPath(pos, *args[0], context);
|
||||
v.mkString(path.path.abs(), context);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_toPath({
|
||||
|
@ -1423,21 +1426,23 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args,
|
|||
}));
|
||||
|
||||
PathSet context;
|
||||
Path path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context));
|
||||
// FIXME: check rootPath
|
||||
auto path = state.coerceToPath(pos, *args[0], context).path;
|
||||
/* Resolve symlinks in ‘path’, unless ‘path’ itself is a symlink
|
||||
directly in the store. The latter condition is necessary so
|
||||
e.g. nix-push does the right thing. */
|
||||
if (!state.store->isStorePath(path)) path = canonPath(path, true);
|
||||
if (!state.store->isInStore(path))
|
||||
if (!state.store->isStorePath(path.abs()))
|
||||
path = CanonPath(canonPath(path.abs(), true));
|
||||
if (!state.store->isInStore(path.abs()))
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("path '%1%' is not in the Nix store", path),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
auto path2 = state.store->toStorePath(path).first;
|
||||
auto path2 = state.store->toStorePath(path.abs()).first;
|
||||
if (!settings.readOnlyMode)
|
||||
state.store->ensurePath(path2);
|
||||
context.insert(state.store->printStorePath(path2));
|
||||
v.mkString(path, context);
|
||||
v.mkString(path.abs(), context);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_storePath({
|
||||
|
@ -1461,14 +1466,14 @@ static RegisterPrimOp primop_storePath({
|
|||
static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
/* We don’t check the path right now, because we don’t want to
|
||||
throw if the path isn’t allowed, but just return false (and we
|
||||
can’t just catch the exception here because we still want to
|
||||
throw if something in the evaluation of `*args[0]` tries to
|
||||
access an unauthorized path). */
|
||||
throw if the path isn’t allowed, but just return false (and we
|
||||
can’t just catch the exception here because we still want to
|
||||
throw if something in the evaluation of `*args[0]` tries to
|
||||
access an unauthorized path). */
|
||||
auto path = realisePath(state, pos, *args[0], { .checkForPureEval = false });
|
||||
|
||||
try {
|
||||
v.mkBool(pathExists(state.checkSourcePath(path)));
|
||||
v.mkBool(path.pathExists());
|
||||
} catch (SysError & e) {
|
||||
/* Don't give away info from errors while canonicalising
|
||||
‘path’ in restricted mode. */
|
||||
|
@ -1513,9 +1518,15 @@ static RegisterPrimOp primop_baseNameOf({
|
|||
static void prim_dirOf(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
auto path = state.coerceToString(pos, *args[0], context, false, false);
|
||||
auto dir = dirOf(*path);
|
||||
if (args[0]->type() == nPath) v.mkPath(dir); else v.mkString(dir, context);
|
||||
state.forceValue(*args[0], pos);
|
||||
if (args[0]->type() == nPath) {
|
||||
auto path = args[0]->path();
|
||||
v.mkPath(path.parent());
|
||||
} else {
|
||||
auto path = state.coerceToString(pos, *args[0], context, false, false);
|
||||
auto dir = dirOf(*path);
|
||||
v.mkString(dir, context);
|
||||
}
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_dirOf({
|
||||
|
@ -1533,16 +1544,14 @@ static RegisterPrimOp primop_dirOf({
|
|||
static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
auto path = realisePath(state, pos, *args[0]);
|
||||
auto s = readFile(path);
|
||||
auto s = path.readFile();
|
||||
if (s.find((char) 0) != std::string::npos)
|
||||
state.debugThrowLastTrace(Error("the contents of the file '%1%' cannot be represented as a Nix string", path));
|
||||
StorePathSet refs;
|
||||
if (state.store->isInStore(path)) {
|
||||
try {
|
||||
refs = state.store->queryPathInfo(state.store->toStorePath(path).first)->references;
|
||||
} catch (Error &) { // FIXME: should be InvalidPathError
|
||||
}
|
||||
}
|
||||
// FIXME: only do queryPathInfo if path.accessor is the store accessor
|
||||
auto refs =
|
||||
state.store->isInStore(path.path.abs()) ?
|
||||
state.store->queryPathInfo(state.store->toStorePath(path.path.abs()).first)->references :
|
||||
StorePathSet{};
|
||||
auto context = state.store->printStorePathSet(refs);
|
||||
v.mkString(s, context);
|
||||
}
|
||||
|
@ -1598,7 +1607,7 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
|
||||
auto path = state.forceStringNoCtx(*args[1], pos);
|
||||
|
||||
v.mkPath(state.checkSourcePath(state.findFile(searchPath, path, pos)));
|
||||
v.mkPath(state.findFile(searchPath, path, pos));
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_findFile(RegisterPrimOp::Info {
|
||||
|
@ -1620,7 +1629,8 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
|
||||
auto path = realisePath(state, pos, *args[1]);
|
||||
|
||||
v.mkString(hashFile(*ht, path).to_string(Base16, false));
|
||||
// FIXME: state.toRealPath(path, context)
|
||||
v.mkString(hashString(*ht, path.readFile()).to_string(Base16, false));
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_hashFile({
|
||||
|
@ -1634,23 +1644,30 @@ static RegisterPrimOp primop_hashFile({
|
|||
.fun = prim_hashFile,
|
||||
});
|
||||
|
||||
static std::string_view fileTypeToString(InputAccessor::Type type)
|
||||
{
|
||||
return
|
||||
type == InputAccessor::Type::tRegular ? "regular" :
|
||||
type == InputAccessor::Type::tDirectory ? "directory" :
|
||||
type == InputAccessor::Type::tSymlink ? "symlink" :
|
||||
"unknown";
|
||||
}
|
||||
|
||||
/* Read a directory (without . or ..) */
|
||||
static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
auto path = realisePath(state, pos, *args[0]);
|
||||
|
||||
DirEntries entries = readDirectory(path);
|
||||
|
||||
auto entries = path.readDirectory();
|
||||
auto attrs = state.buildBindings(entries.size());
|
||||
|
||||
for (auto & ent : entries) {
|
||||
if (ent.type == DT_UNKNOWN)
|
||||
ent.type = getFileType(path + "/" + ent.name);
|
||||
attrs.alloc(ent.name).mkString(
|
||||
ent.type == DT_REG ? "regular" :
|
||||
ent.type == DT_DIR ? "directory" :
|
||||
ent.type == DT_LNK ? "symlink" :
|
||||
"unknown");
|
||||
for (auto & [name, type] : entries) {
|
||||
#if 0
|
||||
// FIXME?
|
||||
if (type == InputAccessor::Type::Misc)
|
||||
type = getFileType(path + "/" + name);
|
||||
#endif
|
||||
attrs.alloc(name).mkString(fileTypeToString(type.value_or(InputAccessor::Type::tMisc)));
|
||||
}
|
||||
|
||||
v.mkAttrs(attrs);
|
||||
|
@ -1956,8 +1973,8 @@ static RegisterPrimOp primop_toFile({
|
|||
static void addPath(
|
||||
EvalState & state,
|
||||
const PosIdx pos,
|
||||
const std::string & name,
|
||||
Path path,
|
||||
std::string_view name,
|
||||
const SourcePath & path,
|
||||
Value * filterFun,
|
||||
FileIngestionMethod method,
|
||||
const std::optional<Hash> expectedHash,
|
||||
|
@ -1965,13 +1982,18 @@ static void addPath(
|
|||
const PathSet & context)
|
||||
{
|
||||
try {
|
||||
// FIXME
|
||||
#if 0
|
||||
// FIXME: handle CA derivation outputs (where path needs to
|
||||
// be rewritten to the actual output).
|
||||
auto rewrites = state.realiseContext(context);
|
||||
path = state.toRealPath(rewriteStrings(path, rewrites), context);
|
||||
#endif
|
||||
|
||||
StorePathSet refs;
|
||||
|
||||
// FIXME
|
||||
#if 0
|
||||
if (state.store->isInStore(path)) {
|
||||
try {
|
||||
auto [storePath, subPath] = state.store->toStorePath(path);
|
||||
|
@ -1981,41 +2003,43 @@ static void addPath(
|
|||
} catch (Error &) { // FIXME: should be InvalidPathError
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
path = evalSettings.pureEval && expectedHash
|
||||
? path
|
||||
: state.checkSourcePath(path);
|
||||
std::unique_ptr<PathFilter> filter;
|
||||
if (filterFun) filter = std::make_unique<PathFilter>([&](const Path & p) {
|
||||
SourcePath path2{path.accessor, CanonPath(p)};
|
||||
|
||||
PathFilter filter = filterFun ? ([&](const Path & path) {
|
||||
auto st = lstat(path);
|
||||
auto st = path2.lstat();
|
||||
|
||||
/* Call the filter function. The first argument is the path,
|
||||
the second is a string indicating the type of the file. */
|
||||
Value arg1;
|
||||
arg1.mkString(path);
|
||||
arg1.mkString(path2.path.abs());
|
||||
|
||||
Value arg2;
|
||||
arg2.mkString(
|
||||
S_ISREG(st.st_mode) ? "regular" :
|
||||
S_ISDIR(st.st_mode) ? "directory" :
|
||||
S_ISLNK(st.st_mode) ? "symlink" :
|
||||
"unknown" /* not supported, will fail! */);
|
||||
// assert that type is not "unknown"
|
||||
arg2.mkString(fileTypeToString(st.type));
|
||||
|
||||
Value * args []{&arg1, &arg2};
|
||||
Value res;
|
||||
state.callFunction(*filterFun, 2, args, res, pos);
|
||||
|
||||
return state.forceBool(res, pos);
|
||||
}) : defaultPathFilter;
|
||||
});
|
||||
|
||||
std::optional<StorePath> expectedStorePath;
|
||||
if (expectedHash)
|
||||
expectedStorePath = state.store->makeFixedOutputPath(method, *expectedHash, name);
|
||||
|
||||
// FIXME: instead of a store path, we could return a
|
||||
// SourcePath that applies the filter lazily and copies to the
|
||||
// store on-demand.
|
||||
|
||||
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
|
||||
StorePath dstPath = settings.readOnlyMode
|
||||
? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first
|
||||
: state.store->addToStore(name, path, method, htSHA256, filter, state.repair, refs);
|
||||
// FIXME
|
||||
if (method != FileIngestionMethod::Recursive)
|
||||
throw Error("'recursive = false' is not implemented");
|
||||
auto dstPath = path.fetchToStore(state.store, name, filter.get(), state.repair);
|
||||
if (expectedHash && expectedStorePath != dstPath)
|
||||
state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
|
||||
state.allowAndSetStorePathString(dstPath, v);
|
||||
|
@ -2031,7 +2055,7 @@ static void addPath(
|
|||
static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
Path path = state.coerceToPath(pos, *args[1], context);
|
||||
auto path = state.coerceToPath(pos, *args[1], context);
|
||||
|
||||
state.forceValue(*args[0], pos);
|
||||
if (args[0]->type() != nFunction)
|
||||
|
@ -2042,7 +2066,7 @@ static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * arg
|
|||
.errPos = state.positions[pos]
|
||||
}));
|
||||
|
||||
addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);
|
||||
addPath(state, pos, path.baseName(), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_filterSource({
|
||||
|
@ -2103,7 +2127,7 @@ static RegisterPrimOp primop_filterSource({
|
|||
static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
state.forceAttrs(*args[0], pos);
|
||||
Path path;
|
||||
std::optional<SourcePath> path;
|
||||
std::string name;
|
||||
Value * filterFun = nullptr;
|
||||
auto method = FileIngestionMethod::Recursive;
|
||||
|
@ -2113,7 +2137,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
for (auto & attr : *args[0]->attrs) {
|
||||
auto n = state.symbols[attr.name];
|
||||
if (n == "path")
|
||||
path = state.coerceToPath(attr.pos, *attr.value, context);
|
||||
path.emplace(state.coerceToPath(attr.pos, *attr.value, context));
|
||||
else if (attr.name == state.sName)
|
||||
name = state.forceStringNoCtx(*attr.value, attr.pos);
|
||||
else if (n == "filter") {
|
||||
|
@ -2129,15 +2153,15 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
.errPos = state.positions[attr.pos]
|
||||
}));
|
||||
}
|
||||
if (path.empty())
|
||||
if (!path)
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("'path' required"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
if (name.empty())
|
||||
name = baseNameOf(path);
|
||||
name = path->baseName();
|
||||
|
||||
addPath(state, pos, name, path, filterFun, method, expectedHash, v, context);
|
||||
addPath(state, pos, name, *path, filterFun, method, expectedHash, v, context);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_path({
|
||||
|
@ -3994,7 +4018,6 @@ void EvalState::createBaseEnv()
|
|||
|
||||
/* Add a wrapper around the derivation primop that computes the
|
||||
`drvPath' and `outPath' attributes lazily. */
|
||||
sDerivationNix = symbols.create(derivationNixPath);
|
||||
auto vDerivation = allocValue();
|
||||
addConstant("derivation", vDerivation);
|
||||
|
||||
|
@ -4006,12 +4029,7 @@ void EvalState::createBaseEnv()
|
|||
|
||||
/* Note: we have to initialize the 'derivation' constant *after*
|
||||
building baseEnv/staticBaseEnv because it uses 'builtins'. */
|
||||
char code[] =
|
||||
#include "primops/derivation.nix.gen.hh"
|
||||
// the parser needs two NUL bytes as terminators; one of them
|
||||
// is implied by being a C string.
|
||||
"\0";
|
||||
eval(parse(code, sizeof(code), foFile, derivationNixPath, "/", staticBaseEnv), *vDerivation);
|
||||
evalFile(derivationInternal, *vDerivation);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -68,11 +68,11 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
|
|||
auto input = fetchers::Input::fromAttrs(std::move(attrs));
|
||||
|
||||
// FIXME: use name
|
||||
auto [tree, input2] = input.fetch(state.store);
|
||||
auto [storePath, input2] = input.fetchToStore(state.store);
|
||||
|
||||
auto attrs2 = state.buildBindings(8);
|
||||
auto storePath = state.store->printStorePath(tree.storePath);
|
||||
attrs2.alloc(state.sOutPath).mkString(storePath, {storePath});
|
||||
auto storePath2 = state.store->printStorePath(storePath);
|
||||
attrs2.alloc(state.sOutPath).mkString(storePath2, {storePath2});
|
||||
if (input2.getRef())
|
||||
attrs2.alloc("branch").mkString(*input2.getRef());
|
||||
// Backward compatibility: set 'rev' to
|
||||
|
@ -84,7 +84,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
|
|||
attrs2.alloc("revCount").mkInt(*revCount);
|
||||
v.mkAttrs(attrs2);
|
||||
|
||||
state.allowPath(tree.storePath);
|
||||
state.allowPath(storePath);
|
||||
}
|
||||
|
||||
static RegisterPrimOp r_fetchMercurial("fetchMercurial", 1, prim_fetchMercurial);
|
||||
|
|
|
@ -4,6 +4,7 @@
|
|||
#include "fetchers.hh"
|
||||
#include "filetransfer.hh"
|
||||
#include "registry.hh"
|
||||
#include "tarball.hh"
|
||||
|
||||
#include <ctime>
|
||||
#include <iomanip>
|
||||
|
@ -11,27 +12,22 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
void emitTreeAttrs(
|
||||
static void emitTreeAttrs(
|
||||
EvalState & state,
|
||||
const fetchers::Tree & tree,
|
||||
const fetchers::Input & input,
|
||||
Value & v,
|
||||
std::function<void(Value &)> setOutPath,
|
||||
bool emptyRevFallback,
|
||||
bool forceDirty)
|
||||
{
|
||||
assert(input.isLocked());
|
||||
|
||||
auto attrs = state.buildBindings(8);
|
||||
|
||||
auto storePath = state.store->printStorePath(tree.storePath);
|
||||
|
||||
attrs.alloc(state.sOutPath).mkString(storePath, {storePath});
|
||||
setOutPath(attrs.alloc(state.sOutPath));
|
||||
|
||||
// FIXME: support arbitrary input attributes.
|
||||
|
||||
auto narHash = input.getNarHash();
|
||||
assert(narHash);
|
||||
attrs.alloc("narHash").mkString(narHash->to_string(SRI, true));
|
||||
if (auto narHash = input.getNarHash())
|
||||
attrs.alloc("narHash").mkString(narHash->to_string(SRI, true));
|
||||
|
||||
if (input.getType() == "git")
|
||||
attrs.alloc("submodules").mkBool(
|
||||
|
@ -65,6 +61,22 @@ void emitTreeAttrs(
|
|||
v.mkAttrs(attrs);
|
||||
}
|
||||
|
||||
void emitTreeAttrs(
|
||||
EvalState & state,
|
||||
const SourcePath & path,
|
||||
const fetchers::Input & input,
|
||||
Value & v,
|
||||
bool emptyRevFallback,
|
||||
bool forceDirty)
|
||||
{
|
||||
emitTreeAttrs(state, input, v,
|
||||
[&](Value & vOutPath) {
|
||||
vOutPath.mkPath(path);
|
||||
},
|
||||
emptyRevFallback,
|
||||
forceDirty);
|
||||
}
|
||||
|
||||
std::string fixURI(std::string uri, EvalState & state, const std::string & defaultScheme = "file")
|
||||
{
|
||||
state.checkURI(uri);
|
||||
|
@ -86,6 +98,7 @@ std::string fixURIForGit(std::string uri, EvalState & state)
|
|||
struct FetchTreeParams {
|
||||
bool emptyRevFallback = false;
|
||||
bool allowNameArgument = false;
|
||||
bool returnPath = true; // whether to return a lazily fetched SourcePath or a StorePath
|
||||
};
|
||||
|
||||
static void fetchTree(
|
||||
|
@ -123,7 +136,9 @@ static void fetchTree(
|
|||
|
||||
for (auto & attr : *args[0]->attrs) {
|
||||
if (attr.name == state.sType) continue;
|
||||
|
||||
state.forceValue(*attr.value, attr.pos);
|
||||
|
||||
if (attr.value->type() == nPath || attr.value->type() == nString) {
|
||||
auto s = state.coerceToString(attr.pos, *attr.value, context, false, false).toOwned();
|
||||
attrs.emplace(state.symbols[attr.name],
|
||||
|
@ -169,11 +184,33 @@ static void fetchTree(
|
|||
if (evalSettings.pureEval && !input.isLocked())
|
||||
state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos]));
|
||||
|
||||
auto [tree, input2] = input.fetch(state.store);
|
||||
if (params.returnPath) {
|
||||
auto [accessor, input2] = input.getAccessor(state.store);
|
||||
|
||||
state.allowPath(tree.storePath);
|
||||
state.registerAccessor(accessor);
|
||||
|
||||
emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback, false);
|
||||
emitTreeAttrs(
|
||||
state,
|
||||
{ accessor, CanonPath::root },
|
||||
input2,
|
||||
v,
|
||||
params.emptyRevFallback,
|
||||
false);
|
||||
} else {
|
||||
auto [storePath, input2] = input.fetchToStore(state.store);
|
||||
|
||||
auto storePath2 = state.store->printStorePath(storePath);
|
||||
|
||||
emitTreeAttrs(
|
||||
state, input2, v,
|
||||
[&](Value & vOutPath) {
|
||||
vOutPath.mkString(storePath2, {storePath2});
|
||||
},
|
||||
params.emptyRevFallback,
|
||||
false);
|
||||
|
||||
state.allowPath(storePath);
|
||||
}
|
||||
}
|
||||
|
||||
static void prim_fetchTree(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
|
@ -220,8 +257,6 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
|
|||
} else
|
||||
url = state.forceStringNoCtx(*args[0], pos);
|
||||
|
||||
url = resolveUri(*url);
|
||||
|
||||
state.checkURI(*url);
|
||||
|
||||
if (name == "")
|
||||
|
@ -247,7 +282,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
|
|||
// https://github.com/NixOS/nix/issues/4313
|
||||
auto storePath =
|
||||
unpack
|
||||
? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).first.storePath
|
||||
? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).first
|
||||
: fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
|
||||
|
||||
if (expectedHash) {
|
||||
|
@ -331,7 +366,13 @@ static RegisterPrimOp primop_fetchTarball({
|
|||
|
||||
static void prim_fetchGit(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
fetchTree(state, pos, args, v, "git", FetchTreeParams { .emptyRevFallback = true, .allowNameArgument = true });
|
||||
fetchTree(
|
||||
state, pos, args, v, "git",
|
||||
FetchTreeParams {
|
||||
.emptyRevFallback = true,
|
||||
.allowNameArgument = true,
|
||||
.returnPath = false,
|
||||
});
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_fetchGit({
|
||||
|
|
src/libexpr/primops/patch.cc (new file, 126 lines)
@@ -0,0 +1,126 @@
#include "primops.hh"

namespace nix {

static void prim_patch(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
    std::vector<std::string> patches;
    std::optional<SourcePath> src;

    state.forceAttrs(*args[0], pos);

    for (auto & attr : *args[0]->attrs) {
        std::string_view n(state.symbols[attr.name]);

        auto check = [&]()
        {
            if (!patches.empty())
                state.debugThrowLastTrace(EvalError({
                    .msg = hintfmt("'builtins.patch' does not support both 'patches' and 'patchFiles'"),
                    .errPos = state.positions[attr.pos]
                }));
        };

        if (n == "src") {
            PathSet context;
            src.emplace(state.coerceToPath(pos, *attr.value, context));
        }

        else if (n == "patchFiles") {
            check();
            state.forceList(*attr.value, attr.pos);
            for (auto elem : attr.value->listItems()) {
                // FIXME: use realisePath
                PathSet context;
                auto patchFile = state.coerceToPath(attr.pos, *elem, context);
                patches.push_back(patchFile.readFile());
            }
        }

        else if (n == "patches") {
            check();
            state.forceList(*attr.value, attr.pos);
            for (auto elem : attr.value->listItems())
                patches.push_back(std::string(state.forceStringNoCtx(*elem, attr.pos)));
        }

        else
            throw Error({
                .msg = hintfmt("attribute '%s' isn't supported in call to 'builtins.patch'", n),
                .errPos = state.positions[pos]
            });
    }

    if (!src)
        state.debugThrowLastTrace(EvalError({
            .msg = hintfmt("attribute 'src' is missing in call to 'builtins.patch'"),
            .errPos = state.positions[pos]
        }));

    if (!src->path.isRoot())
        throw UnimplementedError("applying patches to a non-root path ('%s') is not yet supported", src->path);

    auto accessor = makePatchingInputAccessor(src->accessor, patches);

    state.registerAccessor(accessor);

    v.mkPath(SourcePath{accessor, src->path});
}

static RegisterPrimOp primop_patch({
    .name = "__patch",
    .args = {"args"},
    .doc = R"(
      Apply patches to a source tree. This function has the following required argument:

      - src\
        The input source tree.

      It also takes one of the following:

      - patchFiles\
        A list of patch files to be applied to `src`.

      - patches\
        A list of patches (i.e. strings) to be applied to `src`.

      It returns a source tree that lazily and non-destructively
      applies the specified patches to `src`.

      Example:

      ```nix
      let
        tree = builtins.patch {
          src = fetchTree {
            type = "github";
            owner = "NixOS";
            repo = "patchelf";
            rev = "be0cc30a59b2755844bcd48823f6fbc8d97b93a7";
          };
          patches = [
            ''
              diff --git a/src/patchelf.cc b/src/patchelf.cc
              index 6882b28..28f511c 100644
              --- a/src/patchelf.cc
              +++ b/src/patchelf.cc
              @@ -1844,6 +1844,8 @@ void showHelp(const std::string & progName)

               int mainWrapped(int argc, char * * argv)
               {
              +    printf("Hello!");
              +
                   if (argc <= 1) {
                       showHelp(argv[0]);
                       return 1;
            ''
          ];
        };
      in builtins.readFile (tree + "/src/patchelf.cc")
      ```
    )",
    .fun = prim_patch,
});

}
|
|
@ -62,7 +62,7 @@ namespace nix {
|
|||
// not supported by store 'dummy'" thrown in the test body.
|
||||
TEST_F(JSONValueTest, DISABLED_Path) {
|
||||
Value v;
|
||||
v.mkPath("test");
|
||||
v.mkPath(state.rootPath("/test"));
|
||||
ASSERT_EQ(getJSONValue(v), "\"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x\"");
|
||||
}
|
||||
} /* namespace nix */
|
||||
|
|
|
@ -23,7 +23,7 @@ namespace nix {
|
|||
}
|
||||
Value eval(std::string input, bool forceValue = true) {
|
||||
Value v;
|
||||
Expr * e = state.parseExprFromString(input, "");
|
||||
Expr * e = state.parseExprFromString(input, state.rootPath("/"));
|
||||
assert(e);
|
||||
state.eval(e, v);
|
||||
if (forceValue)
|
||||
|
@ -99,14 +99,17 @@ namespace nix {
|
|||
}
|
||||
|
||||
MATCHER_P(IsPathEq, p, fmt("Is a path equal to \"%1%\"", p)) {
|
||||
if (arg.type() != nPath) {
|
||||
*result_listener << "Expected a path got " << arg.type();
|
||||
return false;
|
||||
} else if (std::string_view(arg.string.s) != p) {
|
||||
*result_listener << "Expected a path that equals \"" << p << "\" but got: " << arg.string.s;
|
||||
if (arg.type() != nPath) {
|
||||
*result_listener << "Expected a path got " << arg.type();
|
||||
return false;
|
||||
} else {
|
||||
auto path = arg.path();
|
||||
if (path.path != CanonPath(p)) {
|
||||
*result_listener << "Expected a path that equals \"" << p << "\" but got: " << path.path;
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -8,7 +8,7 @@ libexpr-tests_INSTALL_DIR :=
|
|||
|
||||
libexpr-tests_SOURCES := $(wildcard $(d)/*.cc)
|
||||
|
||||
libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests
|
||||
libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests -I src/libfetchers
|
||||
|
||||
libexpr-tests_LIBS = libexpr libutil libstore libfetchers
|
||||
|
||||
|
|
|
@ -148,15 +148,17 @@ namespace nix {
|
|||
}
|
||||
|
||||
TEST_F(PrimOpTest, unsafeGetAttrPos) {
|
||||
// The `y` attribute is at position
|
||||
const char* expr = "builtins.unsafeGetAttrPos \"y\" { y = \"x\"; }";
|
||||
state.corepkgsFS->addFile(CanonPath("foo.nix"), "{ y = \"x\"; }");
|
||||
|
||||
auto expr = "builtins.unsafeGetAttrPos \"y\" (import <nix/foo.nix>)";
|
||||
auto v = eval(expr);
|
||||
ASSERT_THAT(v, IsAttrsOfSize(3));
|
||||
|
||||
auto file = v.attrs->find(createSymbol("file"));
|
||||
ASSERT_NE(file, nullptr);
|
||||
// FIXME: The file when running these tests is the input string?!?
|
||||
ASSERT_THAT(*file->value, IsStringEq(expr));
|
||||
ASSERT_THAT(*file->value, IsString());
|
||||
auto s = baseNameOf(file->value->string.s);
|
||||
ASSERT_EQ(s, "foo.nix");
|
||||
|
||||
auto line = v.attrs->find(createSymbol("line"));
|
||||
ASSERT_NE(line, nullptr);
|
||||
|
@ -164,7 +166,7 @@ namespace nix {
|
|||
|
||||
auto column = v.attrs->find(createSymbol("column"));
|
||||
ASSERT_NE(column, nullptr);
|
||||
ASSERT_THAT(*column->value, IsIntEq(33));
|
||||
ASSERT_THAT(*column->value, IsIntEq(3));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, hasAttr) {
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
#include "json.hh"
|
||||
#include "eval-inline.hh"
|
||||
#include "util.hh"
|
||||
#include "store-api.hh"
|
||||
|
||||
#include <cstdlib>
|
||||
#include <iomanip>
|
||||
|
@ -33,9 +34,11 @@ void printValueAsJSON(EvalState & state, bool strict,
|
|||
|
||||
case nPath:
|
||||
if (copyToStore)
|
||||
out.write(state.copyPathToStore(context, v.path));
|
||||
out.write(
|
||||
state.store->printStorePath(
|
||||
state.copyPathToStore(context, v.path())));
|
||||
else
|
||||
out.write(v.path);
|
||||
out.write(v.path().path.abs());
|
||||
break;
|
||||
|
||||
case nNull:
|
||||
|
|
|
@ -24,7 +24,8 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
|||
|
||||
static void posToXML(EvalState & state, XMLAttrs & xmlAttrs, const Pos & pos)
|
||||
{
|
||||
xmlAttrs["path"] = pos.file;
|
||||
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
||||
xmlAttrs["path"] = path->path.abs();
|
||||
xmlAttrs["line"] = (format("%1%") % pos.line).str();
|
||||
xmlAttrs["column"] = (format("%1%") % pos.column).str();
|
||||
}
|
||||
|
@ -77,7 +78,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
|||
break;
|
||||
|
||||
case nPath:
|
||||
doc.writeEmptyElement("path", singletonAttrs("value", v.path));
|
||||
doc.writeEmptyElement("path", singletonAttrs("value", v.path().to_string()));
|
||||
break;
|
||||
|
||||
case nNull:
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
#include <cassert>
|
||||
|
||||
#include "symbol-table.hh"
|
||||
#include "input-accessor.hh"
|
||||
|
||||
#if HAVE_BOEHMGC
|
||||
#include <gc/gc_allocator.h>
|
||||
|
@ -171,7 +172,11 @@ public:
|
|||
const char * * context; // must be in sorted order
|
||||
} string;
|
||||
|
||||
const char * path;
|
||||
struct {
|
||||
InputAccessor * accessor;
|
||||
const char * path;
|
||||
} _path;
|
||||
|
||||
Bindings * attrs;
|
||||
struct {
|
||||
size_t size;
|
||||
|
@ -251,15 +256,21 @@ public:
|
|||
|
||||
void mkStringMove(const char * s, const PathSet & context);
|
||||
|
||||
inline void mkPath(const char * s)
|
||||
inline void mkString(const Symbol & s)
|
||||
{
|
||||
mkString(((const std::string &) s).c_str());
|
||||
}
|
||||
|
||||
void mkPath(const SourcePath & path);
|
||||
|
||||
inline void mkPath(InputAccessor * accessor, const char * path)
|
||||
{
|
||||
clearValue();
|
||||
internalType = tPath;
|
||||
path = s;
|
||||
_path.accessor = accessor;
|
||||
_path.path = path;
|
||||
}
|
||||
|
||||
void mkPath(std::string_view s);
|
||||
|
||||
inline void mkNull()
|
||||
{
|
||||
clearValue();
|
||||
|
@ -400,6 +411,21 @@ public:
|
|||
auto begin = listElems();
|
||||
return ConstListIterable { begin, begin + listSize() };
|
||||
}
|
||||
|
||||
SourcePath path() const
|
||||
{
|
||||
assert(internalType == tPath);
|
||||
return SourcePath {
|
||||
.accessor = ref(_path.accessor->shared_from_this()),
|
||||
.path = CanonPath(CanonPath::unchecked_t(), _path.path)
|
||||
};
|
||||
}
|
||||
|
||||
std::string_view str() const
|
||||
{
|
||||
assert(internalType == tString);
|
||||
return std::string_view(string.s);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
|
|
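A hedged sketch, not from the commit, of how a path Value round-trips through the accessor-aware representation added above: `mkPath(SourcePath)` stores an accessor pointer plus a `CanonPath`, and `Value::path()` rebuilds the `SourcePath`. It assumes the Nix source tree headers and an `EvalState` supplied by the caller; the wrapper function name is invented.

```cpp
// A sketch only, assuming the Nix source tree: round-tripping a path Value
// through the new accessor-aware representation shown above.
#include "eval.hh"

#include <cassert>
#include <iostream>

using namespace nix;

// Hypothetical helper; 'state' must be constructed by the caller.
void pathValueRoundTrip(EvalState & state)
{
    Value v;
    v.mkPath(state.rootPath("/etc/nixos/configuration.nix")); // SourcePath overload from this diff
    assert(v.type() == nPath);
    SourcePath p = v.path();              // rebuilds {accessor, CanonPath}
    std::cerr << p.path.abs() << "\n";    // prints /etc/nixos/configuration.nix
}
```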
|
@ -17,14 +17,23 @@ create table if not exists Cache (
|
|||
timestamp integer not null,
|
||||
primary key (input)
|
||||
);
|
||||
|
||||
create table if not exists Facts (
|
||||
name text not null,
|
||||
value text not null,
|
||||
primary key (name)
|
||||
);
|
||||
)sql";
|
||||
|
||||
// FIXME: we should periodically purge/nuke this cache to prevent it
|
||||
// from growing too big.
|
||||
|
||||
struct CacheImpl : Cache
|
||||
{
|
||||
struct State
|
||||
{
|
||||
SQLite db;
|
||||
SQLiteStmt add, lookup;
|
||||
SQLiteStmt add, lookup, upsertFact, queryFact;
|
||||
};
|
||||
|
||||
Sync<State> _state;
|
||||
|
@ -33,7 +42,7 @@ struct CacheImpl : Cache
|
|||
{
|
||||
auto state(_state.lock());
|
||||
|
||||
auto dbPath = getCacheDir() + "/nix/fetcher-cache-v1.sqlite";
|
||||
auto dbPath = getCacheDir() + "/nix/fetcher-cache-v2.sqlite";
|
||||
createDirs(dirOf(dbPath));
|
||||
|
||||
state->db = SQLite(dbPath);
|
||||
|
@ -45,6 +54,12 @@ struct CacheImpl : Cache
|
|||
|
||||
state->lookup.create(state->db,
|
||||
"select info, path, immutable, timestamp from Cache where input = ?");
|
||||
|
||||
state->upsertFact.create(state->db,
|
||||
"insert or replace into Facts(name, value) values (?, ?)");
|
||||
|
||||
state->queryFact.create(state->db,
|
||||
"select value from Facts where name = ?");
|
||||
}
|
||||
|
||||
void add(
|
||||
|
@ -110,6 +125,26 @@ struct CacheImpl : Cache
|
|||
.storePath = std::move(storePath)
|
||||
};
|
||||
}
|
||||
|
||||
void upsertFact(
|
||||
std::string_view key,
|
||||
std::string_view value) override
|
||||
{
|
||||
debug("upserting fact '%s' -> '%s'", key, value);
|
||||
_state.lock()->upsertFact.use()
|
||||
(key)
|
||||
(value).exec();
|
||||
}
|
||||
|
||||
std::optional<std::string> queryFact(std::string_view key) override
|
||||
{
|
||||
auto state(_state.lock());
|
||||
|
||||
auto stmt(state->queryFact.use()(key));
|
||||
if (!stmt.next()) return {};
|
||||
|
||||
return stmt.getStr(0);
|
||||
}
|
||||
};
|
||||
|
||||
ref<Cache> getCache()
|
||||
|
|
|
@@ -1,6 +1,7 @@
 #pragma once

 #include "fetchers.hh"
+#include "path.hh"

 namespace nix::fetchers {

@@ -29,6 +30,14 @@ struct Cache
     virtual std::optional<Result> lookupExpired(
         ref<Store> store,
         const Attrs & inAttrs) = 0;
+
+    /* A simple key/value store for immutable facts such as the
+       revcount corresponding to a rev. */
+    virtual void upsertFact(
+        std::string_view key,
+        std::string_view value) = 0;
+
+    virtual std::optional<std::string> queryFact(std::string_view key) = 0;
 };

 ref<Cache> getCache();
|
||||
|
|
|
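A hedged usage sketch, not from the commit, for the Facts key/value store declared above. Only `getCache()`, `upsertFact()` and `queryFact()` come from the diff; the "revCount:" key scheme and the helper functions are invented for illustration.

```cpp
// A sketch only, assuming the Nix source tree: caching an immutable fact
// (here, a rev -> revCount mapping) via the interface declared above.
#include "cache.hh"

#include <cstdint>
#include <optional>
#include <string>

using namespace nix::fetchers;

// Hypothetical helpers; the "revCount:" key scheme is made up.
void rememberRevCount(const std::string & rev, uint64_t revCount)
{
    getCache()->upsertFact("revCount:" + rev, std::to_string(revCount));
}

std::optional<uint64_t> recallRevCount(const std::string & rev)
{
    if (auto s = getCache()->queryFact("revCount:" + rev))
        return std::stoull(*s);
    return std::nullopt;
}
```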
@ -1,5 +1,6 @@
|
|||
#include "fetchers.hh"
|
||||
#include "store-api.hh"
|
||||
#include "input-accessor.hh"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
|
@ -23,12 +24,8 @@ static void fixupInput(Input & input)
|
|||
// Check common attributes.
|
||||
input.getType();
|
||||
input.getRef();
|
||||
if (input.getRev())
|
||||
input.locked = true;
|
||||
input.getRevCount();
|
||||
input.getLastModified();
|
||||
if (input.getNarHash())
|
||||
input.locked = true;
|
||||
}
|
||||
|
||||
Input Input::fromURL(const ParsedURL & url)
|
||||
|
@ -87,9 +84,21 @@ Attrs Input::toAttrs() const
|
|||
return attrs;
|
||||
}
|
||||
|
||||
bool Input::hasAllInfo() const
|
||||
bool Input::isDirect() const
|
||||
{
|
||||
return getNarHash() && scheme && scheme->hasAllInfo(*this);
|
||||
assert(scheme);
|
||||
return !scheme || scheme->isDirect(*this);
|
||||
}
|
||||
|
||||
bool Input::isLocked() const
|
||||
{
|
||||
return scheme && scheme->isLocked(*this);
|
||||
}
|
||||
|
||||
std::optional<std::string> Input::isRelative() const
|
||||
{
|
||||
assert(scheme);
|
||||
return scheme->isRelative(*this);
|
||||
}
|
||||
|
||||
bool Input::operator ==(const Input & other) const
|
||||
|
@ -107,50 +116,28 @@ bool Input::contains(const Input & other) const
|
|||
return false;
|
||||
}
|
||||
|
||||
std::pair<Tree, Input> Input::fetch(ref<Store> store) const
|
||||
std::pair<StorePath, Input> Input::fetchToStore(ref<Store> store) const
|
||||
{
|
||||
if (!scheme)
|
||||
throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs()));
|
||||
|
||||
/* The tree may already be in the Nix store, or it could be
|
||||
substituted (which is often faster than fetching from the
|
||||
original source). So check that. */
|
||||
if (hasAllInfo()) {
|
||||
try {
|
||||
auto storePath = computeStorePath(*store);
|
||||
|
||||
store->ensurePath(storePath);
|
||||
|
||||
debug("using substituted/cached input '%s' in '%s'",
|
||||
to_string(), store->printStorePath(storePath));
|
||||
|
||||
return {Tree { .actualPath = store->toRealPath(storePath), .storePath = std::move(storePath) }, *this};
|
||||
} catch (Error & e) {
|
||||
debug("substitution of input '%s' failed: %s", to_string(), e.what());
|
||||
}
|
||||
}
|
||||
|
||||
auto [storePath, input] = [&]() -> std::pair<StorePath, Input> {
|
||||
try {
|
||||
return scheme->fetch(store, *this);
|
||||
auto [accessor, input2] = getAccessor(store);
|
||||
auto storePath = accessor->root().fetchToStore(store, input2.getName());
|
||||
return {storePath, input2};
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while fetching the input '%s'", to_string());
|
||||
throw;
|
||||
}
|
||||
}();
|
||||
|
||||
Tree tree {
|
||||
.actualPath = store->toRealPath(storePath),
|
||||
.storePath = storePath,
|
||||
};
|
||||
|
||||
auto narHash = store->queryPathInfo(tree.storePath)->narHash;
|
||||
input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
|
||||
return {std::move(storePath), input};
|
||||
}
|
||||
|
||||
void Input::checkLocks(Input & input) const
|
||||
{
|
||||
if (auto prevNarHash = getNarHash()) {
|
||||
if (narHash != *prevNarHash)
|
||||
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
|
||||
to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true));
|
||||
if (input.getNarHash() != prevNarHash)
|
||||
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s'",
|
||||
to_string(), prevNarHash->to_string(SRI, true));
|
||||
}
|
||||
|
||||
if (auto prevLastModified = getLastModified()) {
|
||||
|
@ -164,12 +151,24 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
|
|||
throw Error("'revCount' attribute mismatch in input '%s', expected %d",
|
||||
input.to_string(), *prevRevCount);
|
||||
}
|
||||
}
|
||||
|
||||
input.locked = true;
|
||||
std::pair<ref<InputAccessor>, Input> Input::getAccessor(ref<Store> store) const
|
||||
{
|
||||
// FIXME: cache the accessor
|
||||
|
||||
assert(input.hasAllInfo());
|
||||
if (!scheme)
|
||||
throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs()));
|
||||
|
||||
return {std::move(tree), input};
|
||||
try {
|
||||
auto [accessor, final] = scheme->getAccessor(store, *this);
|
||||
accessor->fingerprint = scheme->getFingerprint(store, final);
|
||||
checkLocks(final);
|
||||
return {accessor, std::move(final)};
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while fetching the input '%s'", to_string());
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
Input Input::applyOverrides(
|
||||
|
@ -186,18 +185,13 @@ void Input::clone(const Path & destDir) const
|
|||
scheme->clone(*this, destDir);
|
||||
}
|
||||
|
||||
std::optional<Path> Input::getSourcePath() const
|
||||
{
|
||||
assert(scheme);
|
||||
return scheme->getSourcePath(*this);
|
||||
}
|
||||
|
||||
void Input::markChangedFile(
|
||||
std::string_view file,
|
||||
void Input::putFile(
|
||||
const CanonPath & path,
|
||||
std::string_view contents,
|
||||
std::optional<std::string> commitMsg) const
|
||||
{
|
||||
assert(scheme);
|
||||
return scheme->markChangedFile(*this, file, commitMsg);
|
||||
return scheme->putFile(*this, path, contents, commitMsg);
|
||||
}
|
||||
|
||||
std::string Input::getName() const
|
||||
|
@ -205,14 +199,6 @@ std::string Input::getName() const
|
|||
return maybeGetStrAttr(attrs, "name").value_or("source");
|
||||
}
|
||||
|
||||
StorePath Input::computeStorePath(Store & store) const
|
||||
{
|
||||
auto narHash = getNarHash();
|
||||
if (!narHash)
|
||||
throw Error("cannot compute store path for unlocked input '%s'", to_string());
|
||||
return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, getName());
|
||||
}
|
||||
|
||||
std::string Input::getType() const
|
||||
{
|
||||
return getStrAttr(attrs, "type");
|
||||
|
@ -266,7 +252,12 @@ std::optional<time_t> Input::getLastModified() const
|
|||
return {};
|
||||
}
|
||||
|
||||
ParsedURL InputScheme::toURL(const Input & input)
|
||||
std::optional<std::string> Input::getFingerprint(ref<Store> store) const
|
||||
{
|
||||
return scheme ? scheme->getFingerprint(store, *this) : std::nullopt;
|
||||
}
|
||||
|
||||
ParsedURL InputScheme::toURL(const Input & input) const
|
||||
{
|
||||
throw Error("don't know how to convert input '%s' to a URL", attrsToJSON(input.attrs));
|
||||
}
|
||||
|
@ -274,7 +265,7 @@ ParsedURL InputScheme::toURL(const Input & input)
|
|||
Input InputScheme::applyOverrides(
|
||||
const Input & input,
|
||||
std::optional<std::string> ref,
|
||||
std::optional<Hash> rev)
|
||||
std::optional<Hash> rev) const
|
||||
{
|
||||
if (ref)
|
||||
throw Error("don't know how to set branch/tag name of input '%s' to '%s'", input.to_string(), *ref);
|
||||
|
@ -283,19 +274,26 @@ Input InputScheme::applyOverrides(
    return input;
}

std::optional<Path> InputScheme::getSourcePath(const Input & input)
void InputScheme::putFile(
    const Input & input,
    const CanonPath & path,
    std::string_view contents,
    std::optional<std::string> commitMsg) const
{
    return {};
    throw Error("input '%s' does not support modifying file '%s'", input.to_string(), path);
}

void InputScheme::markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg)
{
    assert(false);
}

void InputScheme::clone(const Input & input, const Path & destDir)
void InputScheme::clone(const Input & input, const Path & destDir) const
{
    throw Error("do not know how to clone input '%s'", input.to_string());
}

std::optional<std::string> InputScheme::getFingerprint(ref<Store> store, const Input & input) const
{
    if (auto rev = input.getRev())
        return rev->gitRev();
    else
        return std::nullopt;
}

}
|
|
|
@ -2,22 +2,16 @@
|
|||
|
||||
#include "types.hh"
|
||||
#include "hash.hh"
|
||||
#include "path.hh"
|
||||
#include "canon-path.hh"
|
||||
#include "attrs.hh"
|
||||
#include "url.hh"
|
||||
|
||||
#include <memory>
|
||||
|
||||
namespace nix { class Store; }
|
||||
namespace nix { class Store; class StorePath; class InputAccessor; }
|
||||
|
||||
namespace nix::fetchers {
|
||||
|
||||
struct Tree
|
||||
{
|
||||
Path actualPath;
|
||||
StorePath storePath;
|
||||
};
|
||||
|
||||
struct InputScheme;
|
||||
|
||||
/* The Input object is generated by a specific fetcher, based on the
|
||||
|
@ -27,18 +21,12 @@ struct InputScheme;
|
|||
* "fromURL()" or "fromAttrs()" static functions which are provided
|
||||
* the url or attrset specified in the flake file.
|
||||
*/
|
||||
|
||||
struct Input
|
||||
{
|
||||
friend struct InputScheme;
|
||||
|
||||
std::shared_ptr<InputScheme> scheme; // note: can be null
|
||||
Attrs attrs;
|
||||
bool locked = false;
|
||||
bool direct = true;
|
||||
|
||||
/* path of the parent of this input, used for relative path resolution */
|
||||
std::optional<Path> parent;
|
||||
|
||||
public:
|
||||
static Input fromURL(const std::string & url);
|
||||
|
@ -57,21 +45,28 @@ public:

    /* Check whether this is a "direct" input, that is, not
       one that goes through a registry. */
    bool isDirect() const { return direct; }
    bool isDirect() const;

    /* Check whether this is a "locked" input, that is,
       one that contains a commit hash or content hash. */
    bool isLocked() const { return locked; }
    bool isLocked() const;

    bool hasAllInfo() const;
    /* Only for relative path flakes, i.e. 'path:./foo', returns the
       relative path, i.e. './foo'. */
    std::optional<std::string> isRelative() const;

    bool operator ==(const Input & other) const;

    bool contains(const Input & other) const;

    /* Fetch the input into the Nix store, returning the location in
       the Nix store and the locked input. */
    std::pair<Tree, Input> fetch(ref<Store> store) const;
    /* Fetch the entire input into the Nix store, returning the
       location in the Nix store and the locked input. */
    std::pair<StorePath, Input> fetchToStore(ref<Store> store) const;

    /* Return an InputAccessor that allows access to files in the
       input without copying it to the store. Also return a possibly
       unlocked input. */
    std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store) const;

    Input applyOverrides(
        std::optional<std::string> ref,
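
The old fetch() is split into the two calls declared above: an eager copy into the store and a lazy accessor over the input's files. A sketch of what that looks like from the calling side; the function name and the file path are invented for illustration:

    #include "fetchers.hh"
    #include "input-accessor.hh"
    #include "store-api.hh"

    using namespace nix;
    using namespace nix::fetchers;

    void example(ref<Store> store, const Input & input)
    {
        // Eager: materialise the whole input in the Nix store, as before.
        auto [storePath, locked] = input.fetchToStore(store);

        // Lazy: read individual files through an accessor without copying
        // the entire tree first.
        auto [accessor, locked2] = input.getAccessor(store);
        if (accessor->pathExists(CanonPath("flake.nix"))) {
            auto text = accessor->readFile(CanonPath("flake.nix"));
            // ... use `text` without the input ever hitting the store
        }
    }
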
@ -79,16 +74,13 @@ public:
|
|||
|
||||
void clone(const Path & destDir) const;
|
||||
|
||||
std::optional<Path> getSourcePath() const;
|
||||
|
||||
void markChangedFile(
|
||||
std::string_view file,
|
||||
void putFile(
|
||||
const CanonPath & path,
|
||||
std::string_view contents,
|
||||
std::optional<std::string> commitMsg) const;
|
||||
|
||||
std::string getName() const;
|
||||
|
||||
StorePath computeStorePath(Store & store) const;
|
||||
|
||||
// Convenience functions for common attributes.
|
||||
std::string getType() const;
|
||||
std::optional<Hash> getNarHash() const;
|
||||
|
@ -96,8 +88,15 @@ public:
|
|||
std::optional<Hash> getRev() const;
|
||||
std::optional<uint64_t> getRevCount() const;
|
||||
std::optional<time_t> getLastModified() const;
|
||||
};
|
||||
|
||||
// For locked inputs, returns a string that uniquely specifies the
|
||||
// content of the input (typically a commit hash or content hash).
|
||||
std::optional<std::string> getFingerprint(ref<Store> store) const;
|
||||
|
||||
private:
|
||||
|
||||
void checkLocks(Input & input) const;
|
||||
};
|
||||
|
||||
/* The InputScheme represents a type of fetcher. Each fetcher
|
||||
* registers with nix at startup time. When processing an input for a
|
||||
|
@ -107,55 +106,44 @@ public:
|
|||
* recognized. The Input object contains the information the fetcher
|
||||
* needs to actually perform the "fetch()" when called.
|
||||
*/
|
||||
|
||||
struct InputScheme
|
||||
{
|
||||
virtual ~InputScheme()
|
||||
{ }
|
||||
|
||||
virtual std::optional<Input> inputFromURL(const ParsedURL & url) = 0;
|
||||
virtual std::optional<Input> inputFromURL(const ParsedURL & url) const = 0;
|
||||
|
||||
virtual std::optional<Input> inputFromAttrs(const Attrs & attrs) = 0;
|
||||
virtual std::optional<Input> inputFromAttrs(const Attrs & attrs) const = 0;
|
||||
|
||||
virtual ParsedURL toURL(const Input & input);
|
||||
|
||||
virtual bool hasAllInfo(const Input & input) = 0;
|
||||
virtual ParsedURL toURL(const Input & input) const;
|
||||
|
||||
virtual Input applyOverrides(
|
||||
const Input & input,
|
||||
std::optional<std::string> ref,
|
||||
std::optional<Hash> rev);
|
||||
std::optional<Hash> rev) const;
|
||||
|
||||
virtual void clone(const Input & input, const Path & destDir);
|
||||
virtual void clone(const Input & input, const Path & destDir) const;
|
||||
|
||||
virtual std::optional<Path> getSourcePath(const Input & input);
|
||||
virtual void putFile(
|
||||
const Input & input,
|
||||
const CanonPath & path,
|
||||
std::string_view contents,
|
||||
std::optional<std::string> commitMsg) const;
|
||||
|
||||
virtual void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg);
|
||||
virtual std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const = 0;
|
||||
|
||||
virtual std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) = 0;
|
||||
virtual bool isDirect(const Input & input) const
|
||||
{ return true; }
|
||||
|
||||
virtual bool isLocked(const Input & input) const
|
||||
{ return false; }
|
||||
|
||||
virtual std::optional<std::string> isRelative(const Input & input) const
|
||||
{ return std::nullopt; }
|
||||
|
||||
virtual std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const;
|
||||
};
|
||||
|
||||
void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
|
||||
|
||||
struct DownloadFileResult
|
||||
{
|
||||
StorePath storePath;
|
||||
std::string etag;
|
||||
std::string effectiveUrl;
|
||||
};
|
||||
|
||||
DownloadFileResult downloadFile(
|
||||
ref<Store> store,
|
||||
const std::string & url,
|
||||
const std::string & name,
|
||||
bool locked,
|
||||
const Headers & headers = {});
|
||||
|
||||
std::pair<Tree, time_t> downloadTarball(
|
||||
ref<Store> store,
|
||||
const std::string & url,
|
||||
const std::string & name,
|
||||
bool locked,
|
||||
const Headers & headers = {});
|
||||
|
||||
}
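
Under the revised interface above, a scheme only has to recognise its inputs and produce an accessor; toURL(), clone(), putFile() and the rest have overridable defaults. A bare-bones sketch, with the scheme name, URL scheme and attribute values invented here:

    #include "fetchers.hh"

    using namespace nix;
    using namespace nix::fetchers;

    struct ExampleInputScheme : InputScheme
    {
        std::optional<Input> inputFromURL(const ParsedURL & url) const override
        {
            if (url.scheme != "example") return {};
            Attrs attrs;
            attrs.emplace("type", "example");
            attrs.emplace("url", url.to_string());
            return inputFromAttrs(attrs);
        }

        std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
        {
            if (maybeGetStrAttr(attrs, "type") != "example") return {};
            Input input;
            input.attrs = attrs;
            return input;
        }

        std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const override
        {
            // A real scheme would fetch or open the input here and wrap it in
            // an accessor (see fs-input-accessor.cc below for one backend).
            throw Error("example inputs cannot be fetched");
        }
    };

    // Registered at startup, like the real schemes later in this diff:
    // static auto r = OnStartup([] { registerInputScheme(std::make_unique<ExampleInputScheme>()); });
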

src/libfetchers/fs-input-accessor.cc (new file, 140 lines)

@ -0,0 +1,140 @@
#include "fs-input-accessor.hh"
|
||||
#include "store-api.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct FSInputAccessorImpl : FSInputAccessor
|
||||
{
|
||||
CanonPath root;
|
||||
std::optional<std::set<CanonPath>> allowedPaths;
|
||||
MakeNotAllowedError makeNotAllowedError;
|
||||
|
||||
FSInputAccessorImpl(
|
||||
const CanonPath & root,
|
||||
std::optional<std::set<CanonPath>> && allowedPaths,
|
||||
MakeNotAllowedError && makeNotAllowedError)
|
||||
: root(root)
|
||||
, allowedPaths(std::move(allowedPaths))
|
||||
, makeNotAllowedError(std::move(makeNotAllowedError))
|
||||
{
|
||||
displayPrefix = root.isRoot() ? "" : root.abs();
|
||||
}
|
||||
|
||||
std::string readFile(const CanonPath & path) override
|
||||
{
|
||||
auto absPath = makeAbsPath(path);
|
||||
checkAllowed(absPath);
|
||||
return nix::readFile(absPath.abs());
|
||||
}
|
||||
|
||||
bool pathExists(const CanonPath & path) override
|
||||
{
|
||||
auto absPath = makeAbsPath(path);
|
||||
return isAllowed(absPath) && nix::pathExists(absPath.abs());
|
||||
}
|
||||
|
||||
Stat lstat(const CanonPath & path) override
|
||||
{
|
||||
auto absPath = makeAbsPath(path);
|
||||
checkAllowed(absPath);
|
||||
auto st = nix::lstat(absPath.abs());
|
||||
return Stat {
|
||||
.type =
|
||||
S_ISREG(st.st_mode) ? tRegular :
|
||||
S_ISDIR(st.st_mode) ? tDirectory :
|
||||
S_ISLNK(st.st_mode) ? tSymlink :
|
||||
tMisc,
|
||||
.isExecutable = S_ISREG(st.st_mode) && st.st_mode & S_IXUSR
|
||||
};
|
||||
}
|
||||
|
||||
DirEntries readDirectory(const CanonPath & path) override
|
||||
{
|
||||
auto absPath = makeAbsPath(path);
|
||||
checkAllowed(absPath);
|
||||
DirEntries res;
|
||||
for (auto & entry : nix::readDirectory(absPath.abs())) {
|
||||
std::optional<Type> type;
|
||||
switch (entry.type) {
|
||||
case DT_REG: type = Type::tRegular; break;
|
||||
case DT_LNK: type = Type::tSymlink; break;
|
||||
case DT_DIR: type = Type::tDirectory; break;
|
||||
}
|
||||
if (isAllowed(absPath + entry.name))
|
||||
res.emplace(entry.name, type);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
std::string readLink(const CanonPath & path) override
|
||||
{
|
||||
auto absPath = makeAbsPath(path);
|
||||
checkAllowed(absPath);
|
||||
return nix::readLink(absPath.abs());
|
||||
}
|
||||
|
||||
CanonPath makeAbsPath(const CanonPath & path)
|
||||
{
|
||||
return root + path;
|
||||
}
|
||||
|
||||
void checkAllowed(const CanonPath & absPath) override
|
||||
{
|
||||
if (!isAllowed(absPath))
|
||||
throw makeNotAllowedError
|
||||
? makeNotAllowedError(absPath)
|
||||
: RestrictedPathError("access to path '%s' is forbidden", absPath);
|
||||
}
|
||||
|
||||
bool isAllowed(const CanonPath & absPath)
|
||||
{
|
||||
if (!absPath.isWithin(root))
|
||||
return false;
|
||||
|
||||
if (allowedPaths) {
|
||||
auto p = absPath.removePrefix(root);
|
||||
if (!p.isAllowed(*allowedPaths))
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void allowPath(CanonPath path) override
|
||||
{
|
||||
if (allowedPaths)
|
||||
allowedPaths->insert(std::move(path));
|
||||
}
|
||||
|
||||
bool hasAccessControl() override
|
||||
{
|
||||
return (bool) allowedPaths;
|
||||
}
|
||||
|
||||
std::optional<CanonPath> getPhysicalPath(const CanonPath & path) override
|
||||
{
|
||||
auto absPath = makeAbsPath(path);
|
||||
if (isAllowed(absPath))
|
||||
return absPath;
|
||||
else
|
||||
return std::nullopt;
|
||||
}
|
||||
};
|
||||
|
||||
ref<FSInputAccessor> makeFSInputAccessor(
|
||||
const CanonPath & root,
|
||||
std::optional<std::set<CanonPath>> && allowedPaths,
|
||||
MakeNotAllowedError && makeNotAllowedError)
|
||||
{
|
||||
return make_ref<FSInputAccessorImpl>(root, std::move(allowedPaths), std::move(makeNotAllowedError));
|
||||
}
|
||||
|
||||
ref<FSInputAccessor> makeStorePathAccessor(
|
||||
ref<Store> store,
|
||||
const StorePath & storePath,
|
||||
MakeNotAllowedError && makeNotAllowedError)
|
||||
{
|
||||
return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)), {}, std::move(makeNotAllowedError));
|
||||
}
|
||||
|
||||
}

src/libfetchers/fs-input-accessor.hh (new file, 29 lines)

@ -0,0 +1,29 @@
#pragma once

#include "input-accessor.hh"

namespace nix {

class StorePath;
class Store;

struct FSInputAccessor : InputAccessor
{
    virtual void checkAllowed(const CanonPath & absPath) = 0;

    virtual void allowPath(CanonPath path) = 0;

    virtual bool hasAccessControl() = 0;
};

ref<FSInputAccessor> makeFSInputAccessor(
    const CanonPath & root,
    std::optional<std::set<CanonPath>> && allowedPaths = {},
    MakeNotAllowedError && makeNotAllowedError = {});

ref<FSInputAccessor> makeStorePathAccessor(
    ref<Store> store,
    const StorePath & storePath,
    MakeNotAllowedError && makeNotAllowedError = {});

}
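
A sketch of using the factory declared above to expose only a known set of files under a root directory, which is how the Git scheme later in this diff restricts a working tree to what `git ls-files` reports. The wrapper function and its names are illustrative:

    #include "fs-input-accessor.hh"
    #include "store-api.hh"

    #include <set>

    using namespace nix;

    ref<FSInputAccessor> openCheckout(const CanonPath & repoRoot, std::set<CanonPath> trackedFiles)
    {
        auto accessor = makeFSInputAccessor(
            repoRoot,
            std::move(trackedFiles),                              // allow-list: only these paths are visible
            [](const CanonPath & path) -> RestrictedPathError {   // called for everything else
                return RestrictedPathError("'%s' is not tracked by Git", path);
            });

        // Reads outside the allow-list fail with the error above instead of
        // silently pulling untracked files into an evaluation.
        return accessor;
    }
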
@ -7,6 +7,7 @@
|
|||
#include "pathlocks.hh"
|
||||
#include "util.hh"
|
||||
#include "git.hh"
|
||||
#include "fs-input-accessor.hh"
|
||||
|
||||
#include "fetch-settings.hh"
|
||||
|
||||
|
@ -18,6 +19,7 @@
|
|||
using namespace std::string_literals;
|
||||
|
||||
namespace nix::fetchers {
|
||||
|
||||
namespace {
|
||||
|
||||
// Explicit initial branch of our bare repo to suppress warnings from new version of git.
|
||||
|
@ -26,23 +28,23 @@ namespace {
|
|||
// old version of git, which will ignore unrecognized `-c` options.
|
||||
const std::string gitInitialBranch = "__nix_dummy_branch";
|
||||
|
||||
bool isCacheFileWithinTtl(const time_t now, const struct stat & st)
|
||||
bool isCacheFileWithinTtl(time_t now, const struct stat & st)
|
||||
{
|
||||
return st.st_mtime + settings.tarballTtl > now;
|
||||
}
|
||||
|
||||
bool touchCacheFile(const Path& path, const time_t& touch_time)
|
||||
bool touchCacheFile(const Path & path, time_t touch_time)
|
||||
{
|
||||
struct timeval times[2];
|
||||
times[0].tv_sec = touch_time;
|
||||
times[0].tv_usec = 0;
|
||||
times[1].tv_sec = touch_time;
|
||||
times[1].tv_usec = 0;
|
||||
struct timeval times[2];
|
||||
times[0].tv_sec = touch_time;
|
||||
times[0].tv_usec = 0;
|
||||
times[1].tv_sec = touch_time;
|
||||
times[1].tv_usec = 0;
|
||||
|
||||
return lutimes(path.c_str(), times) == 0;
|
||||
return lutimes(path.c_str(), times) == 0;
|
||||
}
|
||||
|
||||
Path getCachePath(std::string key)
|
||||
Path getCachePath(std::string_view key)
|
||||
{
|
||||
return getCacheDir() + "/nix/gitv3/" +
|
||||
hashString(htSHA256, key).to_string(Base32, false);
|
||||
|
@ -57,13 +59,12 @@ Path getCachePath(std::string key)
|
|||
// ...
|
||||
std::optional<std::string> readHead(const Path & path)
|
||||
{
|
||||
auto [exit_code, output] = runProgram(RunOptions {
|
||||
auto [status, output] = runProgram(RunOptions {
|
||||
.program = "git",
|
||||
// FIXME: use 'HEAD' to avoid returning all refs
|
||||
.args = {"ls-remote", "--symref", path},
|
||||
});
|
||||
if (exit_code != 0) {
|
||||
return std::nullopt;
|
||||
}
|
||||
if (status != 0) return std::nullopt;
|
||||
|
||||
std::string_view line = output;
|
||||
line = line.substr(0, line.find("\n"));
|
||||
|
@ -82,12 +83,11 @@ std::optional<std::string> readHead(const Path & path)
|
|||
}
|
||||
|
||||
// Persist the HEAD ref from the remote repo in the local cached repo.
|
||||
bool storeCachedHead(const std::string& actualUrl, const std::string& headRef)
|
||||
bool storeCachedHead(const std::string & actualUrl, const std::string & headRef)
|
||||
{
|
||||
Path cacheDir = getCachePath(actualUrl);
|
||||
auto gitDir = ".";
|
||||
try {
|
||||
runProgram("git", true, { "-C", cacheDir, "--git-dir", gitDir, "symbolic-ref", "--", "HEAD", headRef });
|
||||
runProgram("git", true, { "-C", cacheDir, "--git-dir", ".", "symbolic-ref", "--", "HEAD", headRef });
|
||||
} catch (ExecError &e) {
|
||||
if (!WIFEXITED(e.status)) throw;
|
||||
return false;
|
||||
|
@ -96,7 +96,7 @@ bool storeCachedHead(const std::string& actualUrl, const std::string& headRef)
|
|||
return true;
|
||||
}
|
||||
|
||||
std::optional<std::string> readHeadCached(const std::string& actualUrl)
|
||||
std::optional<std::string> readHeadCached(const std::string & actualUrl)
|
||||
{
|
||||
// Create a cache path to store the branch of the HEAD ref. Append something
|
||||
// in front of the URL to prevent collision with the repository itself.
|
||||
|
@ -110,16 +110,15 @@ std::optional<std::string> readHeadCached(const std::string& actualUrl)
|
|||
cachedRef = readHead(cacheDir);
|
||||
if (cachedRef != std::nullopt &&
|
||||
*cachedRef != gitInitialBranch &&
|
||||
isCacheFileWithinTtl(now, st)) {
|
||||
isCacheFileWithinTtl(now, st))
|
||||
{
|
||||
debug("using cached HEAD ref '%s' for repo '%s'", *cachedRef, actualUrl);
|
||||
return cachedRef;
|
||||
}
|
||||
}
|
||||
|
||||
auto ref = readHead(actualUrl);
|
||||
if (ref) {
|
||||
return ref;
|
||||
}
|
||||
if (ref) return ref;
|
||||
|
||||
if (cachedRef) {
|
||||
// If the cached git ref is expired in fetch() below, and the 'git fetch'
|
||||
|
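
getCachePath() and isCacheFileWithinTtl() above decide where the remembered HEAD ref lives and how long it is trusted. A rough sketch of that layout; the "ref-" prefix is only a stand-in for whatever string the (elided) readHeadCached() prepends to the URL, and the helpers come from the same util/hash/settings headers this file already uses:

    #include <sys/stat.h>
    #include <string>

    // getCacheDir(), hashString(), settings.tarballTtl: as used elsewhere in this file.

    Path cachedHeadPath(const std::string & url)
    {
        // e.g. ~/.cache/nix/gitv3/<base32(sha256("ref-" + url))>
        return getCacheDir() + "/nix/gitv3/" +
            hashString(htSHA256, "ref-" + url).to_string(Base32, false);
    }

    bool isFresh(const Path & path, time_t now)
    {
        struct stat st;
        if (stat(path.c_str(), &st) != 0) return false;
        return st.st_mtime + settings.tarballTtl > now;   // same TTL as tarball downloads
    }
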
@ -138,119 +137,11 @@ bool isNotDotGitDirectory(const Path & path)
|
|||
return baseNameOf(path) != ".git";
|
||||
}
|
||||
|
||||
struct WorkdirInfo
|
||||
{
|
||||
bool clean = false;
|
||||
bool hasHead = false;
|
||||
};
|
||||
|
||||
// Returns whether a git workdir is clean and has commits.
|
||||
WorkdirInfo getWorkdirInfo(const Input & input, const Path & workdir)
|
||||
{
|
||||
const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
|
||||
std::string gitDir(".git");
|
||||
|
||||
auto env = getEnv();
|
||||
// Set LC_ALL to C: because we rely on the error messages from git rev-parse to determine what went wrong
|
||||
// that way unknown errors can lead to a failure instead of continuing through the wrong code path
|
||||
env["LC_ALL"] = "C";
|
||||
|
||||
/* Check whether HEAD points to something that looks like a commit,
|
||||
since that is the refrence we want to use later on. */
|
||||
auto result = runProgram(RunOptions {
|
||||
.program = "git",
|
||||
.args = { "-C", workdir, "--git-dir", gitDir, "rev-parse", "--verify", "--no-revs", "HEAD^{commit}" },
|
||||
.environment = env,
|
||||
.mergeStderrToStdout = true
|
||||
});
|
||||
auto exitCode = WEXITSTATUS(result.first);
|
||||
auto errorMessage = result.second;
|
||||
|
||||
if (errorMessage.find("fatal: not a git repository") != std::string::npos) {
|
||||
throw Error("'%s' is not a Git repository", workdir);
|
||||
} else if (errorMessage.find("fatal: Needed a single revision") != std::string::npos) {
|
||||
// indicates that the repo does not have any commits
|
||||
// we want to proceed and will consider it dirty later
|
||||
} else if (exitCode != 0) {
|
||||
// any other errors should lead to a failure
|
||||
throw Error("getting the HEAD of the Git tree '%s' failed with exit code %d:\n%s", workdir, exitCode, errorMessage);
|
||||
}
|
||||
|
||||
bool clean = false;
|
||||
bool hasHead = exitCode == 0;
|
||||
|
||||
try {
|
||||
if (hasHead) {
|
||||
// Using git diff is preferrable over lower-level operations here,
|
||||
// because its conceptually simpler and we only need the exit code anyways.
|
||||
auto gitDiffOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "diff", "HEAD", "--quiet"});
|
||||
if (!submodules) {
|
||||
// Changes in submodules should only make the tree dirty
|
||||
// when those submodules will be copied as well.
|
||||
gitDiffOpts.emplace_back("--ignore-submodules");
|
||||
}
|
||||
gitDiffOpts.emplace_back("--");
|
||||
runProgram("git", true, gitDiffOpts);
|
||||
|
||||
clean = true;
|
||||
}
|
||||
} catch (ExecError & e) {
|
||||
if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
|
||||
}
|
||||
|
||||
return WorkdirInfo { .clean = clean, .hasHead = hasHead };
|
||||
}
|
||||
|
||||
std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, const Path & workdir, const WorkdirInfo & workdirInfo)
|
||||
{
|
||||
const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
|
||||
auto gitDir = ".git";
|
||||
|
||||
if (!fetchSettings.allowDirty)
|
||||
throw Error("Git tree '%s' is dirty", workdir);
|
||||
|
||||
if (fetchSettings.warnDirty)
|
||||
warn("Git tree '%s' is dirty", workdir);
|
||||
|
||||
auto gitOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "ls-files", "-z" });
|
||||
if (submodules)
|
||||
gitOpts.emplace_back("--recurse-submodules");
|
||||
|
||||
auto files = tokenizeString<std::set<std::string>>(
|
||||
runProgram("git", true, gitOpts), "\0"s);
|
||||
|
||||
Path actualPath(absPath(workdir));
|
||||
|
||||
PathFilter filter = [&](const Path & p) -> bool {
|
||||
assert(hasPrefix(p, actualPath));
|
||||
std::string file(p, actualPath.size() + 1);
|
||||
|
||||
auto st = lstat(p);
|
||||
|
||||
if (S_ISDIR(st.st_mode)) {
|
||||
auto prefix = file + "/";
|
||||
auto i = files.lower_bound(prefix);
|
||||
return i != files.end() && hasPrefix(*i, prefix);
|
||||
}
|
||||
|
||||
return files.count(file);
|
||||
};
|
||||
|
||||
auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
|
||||
|
||||
// FIXME: maybe we should use the timestamp of the last
|
||||
// modified dirty file?
|
||||
input.attrs.insert_or_assign(
|
||||
"lastModified",
|
||||
workdirInfo.hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
|
||||
|
||||
return {std::move(storePath), input};
|
||||
}
|
||||
} // end namespace
|
||||
|
||||
struct GitInputScheme : InputScheme
|
||||
{
|
||||
std::optional<Input> inputFromURL(const ParsedURL & url) override
|
||||
std::optional<Input> inputFromURL(const ParsedURL & url) const override
|
||||
{
|
||||
if (url.scheme != "git" &&
|
||||
url.scheme != "git+http" &&
|
||||
|
@ -266,7 +157,7 @@ struct GitInputScheme : InputScheme
|
|||
Attrs attrs;
|
||||
attrs.emplace("type", "git");
|
||||
|
||||
for (auto &[name, value] : url.query) {
|
||||
for (auto & [name, value] : url.query) {
|
||||
if (name == "rev" || name == "ref")
|
||||
attrs.emplace(name, value);
|
||||
else if (name == "shallow" || name == "submodules")
|
||||
|
@ -280,7 +171,7 @@ struct GitInputScheme : InputScheme
|
|||
return inputFromAttrs(attrs);
|
||||
}
|
||||
|
||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) override
|
||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
||||
{
|
||||
if (maybeGetStrAttr(attrs, "type") != "git") return {};
|
||||
|
||||
|
@ -303,7 +194,7 @@ struct GitInputScheme : InputScheme
|
|||
return input;
|
||||
}
|
||||
|
||||
ParsedURL toURL(const Input & input) override
|
||||
ParsedURL toURL(const Input & input) const override
|
||||
{
|
||||
auto url = parseURL(getStrAttr(input.attrs, "url"));
|
||||
if (url.scheme != "git") url.scheme = "git+" + url.scheme;
|
||||
|
@ -314,19 +205,10 @@ struct GitInputScheme : InputScheme
|
|||
return url;
|
||||
}
|
||||
|
||||
bool hasAllInfo(const Input & input) override
|
||||
{
|
||||
bool maybeDirty = !input.getRef();
|
||||
bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
|
||||
return
|
||||
maybeGetIntAttr(input.attrs, "lastModified")
|
||||
&& (shallow || maybeDirty || maybeGetIntAttr(input.attrs, "revCount"));
|
||||
}
|
||||
|
||||
Input applyOverrides(
|
||||
const Input & input,
|
||||
std::optional<std::string> ref,
|
||||
std::optional<Hash> rev) override
|
||||
std::optional<Hash> rev) const override
|
||||
{
|
||||
auto res(input);
|
||||
if (rev) res.attrs.insert_or_assign("rev", rev->gitRev());
|
||||
|
@ -336,13 +218,13 @@ struct GitInputScheme : InputScheme
|
|||
return res;
|
||||
}
|
||||
|
||||
void clone(const Input & input, const Path & destDir) override
|
||||
void clone(const Input & input, const Path & destDir) const override
|
||||
{
|
||||
auto [isLocal, actualUrl] = getActualUrl(input);
|
||||
auto repoInfo = getRepoInfo(input);
|
||||
|
||||
Strings args = {"clone"};
|
||||
|
||||
args.push_back(actualUrl);
|
||||
args.push_back(repoInfo.url);
|
||||
|
||||
if (auto ref = input.getRef()) {
|
||||
args.push_back("--branch");
|
||||
|
@ -356,30 +238,91 @@ struct GitInputScheme : InputScheme
|
|||
runProgram("git", true, args);
|
||||
}
|
||||
|
||||
std::optional<Path> getSourcePath(const Input & input) override
|
||||
void putFile(
|
||||
const Input & input,
|
||||
const CanonPath & path,
|
||||
std::string_view contents,
|
||||
std::optional<std::string> commitMsg) const
|
||||
{
|
||||
auto url = parseURL(getStrAttr(input.attrs, "url"));
|
||||
if (url.scheme == "file" && !input.getRef() && !input.getRev())
|
||||
return url.path;
|
||||
return {};
|
||||
}
|
||||
auto repoInfo = getRepoInfo(input);
|
||||
if (!repoInfo.isLocal)
|
||||
throw Error("cannot commit '%s' to Git repository '%s' because it's not a working tree", path, input.to_string());
|
||||
|
||||
void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
|
||||
{
|
||||
auto sourcePath = getSourcePath(input);
|
||||
assert(sourcePath);
|
||||
auto gitDir = ".git";
|
||||
auto absPath = CanonPath(repoInfo.url) + path;
|
||||
|
||||
// FIXME: make sure that absPath is not a symlink that escapes
|
||||
// the repo.
|
||||
writeFile(absPath.abs(), contents);
|
||||
|
||||
runProgram("git", true,
|
||||
{ "-C", *sourcePath, "--git-dir", gitDir, "add", "--intent-to-add", "--", std::string(file) });
|
||||
{ "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) });
|
||||
|
||||
if (commitMsg)
|
||||
runProgram("git", true,
|
||||
{ "-C", *sourcePath, "--git-dir", gitDir, "commit", std::string(file), "-m", *commitMsg });
|
||||
{ "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-m", *commitMsg });
|
||||
}
|
||||
|
||||
    std::pair<bool, std::string> getActualUrl(const Input & input) const
    struct RepoInfo
    {
        bool shallow = false;
        bool submodules = false;
        bool allRefs = false;

        std::string cacheType;

        /* Whether this is a local, non-bare repository. */
        bool isLocal = false;

        /* Whether this is a local, non-bare, dirty repository. */
        bool isDirty = false;

        /* Whether this repository has any commits. */
        bool hasHead = true;

        /* URL of the repo, or its path if isLocal. */
        std::string url;

        void warnDirty() const
        {
            if (isDirty) {
                if (!fetchSettings.allowDirty)
                    throw Error("Git tree '%s' is dirty", url);

                if (fetchSettings.warnDirty)
                    warn("Git tree '%s' is dirty", url);
            }
        }

        std::string gitDir = ".git";
    };
|
||||
|
||||
bool getSubmodulesAttr(const Input & input) const
|
||||
{
|
||||
return maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
|
||||
}
|
||||
|
||||
RepoInfo getRepoInfo(const Input & input) const
|
||||
{
|
||||
auto checkHashType = [&](const std::optional<Hash> & hash)
|
||||
{
|
||||
if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
|
||||
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(Base16, true));
|
||||
};
|
||||
|
||||
if (auto rev = input.getRev())
|
||||
checkHashType(rev);
|
||||
|
||||
RepoInfo repoInfo {
|
||||
.shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false),
|
||||
.submodules = getSubmodulesAttr(input),
|
||||
.allRefs = maybeGetBoolAttr(input.attrs, "allRefs").value_or(false)
|
||||
};
|
||||
|
||||
repoInfo.cacheType = "git";
|
||||
if (repoInfo.shallow) repoInfo.cacheType += "-shallow";
|
||||
if (repoInfo.submodules) repoInfo.cacheType += "-submodules";
|
||||
if (repoInfo.allRefs) repoInfo.cacheType += "-all-refs";
|
||||
|
||||
// file:// URIs are normally not cloned (but otherwise treated the
|
||||
// same as remote URIs, i.e. we don't use the working tree or
|
||||
// HEAD). Exception: If _NIX_FORCE_HTTP is set, or the repo is a bare git
|
||||
|
@ -387,52 +330,194 @@ struct GitInputScheme : InputScheme
|
|||
static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing
|
||||
auto url = parseURL(getStrAttr(input.attrs, "url"));
|
||||
bool isBareRepository = url.scheme == "file" && !pathExists(url.path + "/.git");
|
||||
bool isLocal = url.scheme == "file" && !forceHttp && !isBareRepository;
|
||||
return {isLocal, isLocal ? url.path : url.base};
|
||||
repoInfo.isLocal = url.scheme == "file" && !forceHttp && !isBareRepository;
|
||||
repoInfo.url = repoInfo.isLocal ? url.path : url.base;
|
||||
|
||||
// If this is a local directory and no ref or revision is
|
||||
// given, then allow the use of an unclean working tree.
|
||||
if (!input.getRef() && !input.getRev() && repoInfo.isLocal) {
|
||||
repoInfo.isDirty = true;
|
||||
|
||||
auto env = getEnv();
|
||||
/* Set LC_ALL to C: because we rely on the error messages
|
||||
from git rev-parse to determine what went wrong that
|
||||
way unknown errors can lead to a failure instead of
|
||||
continuing through the wrong code path. */
|
||||
env["LC_ALL"] = "C";
|
||||
|
||||
/* Check whether HEAD points to something that looks like
|
||||
a commit, since that is the ref we want to use later
|
||||
on. */
|
||||
auto result = runProgram(RunOptions {
|
||||
.program = "git",
|
||||
.args = { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "rev-parse", "--verify", "--no-revs", "HEAD^{commit}" },
|
||||
.environment = env,
|
||||
.mergeStderrToStdout = true
|
||||
});
|
||||
auto exitCode = WEXITSTATUS(result.first);
|
||||
auto errorMessage = result.second;
|
||||
|
||||
if (errorMessage.find("fatal: not a git repository") != std::string::npos) {
|
||||
throw Error("'%s' is not a Git repository", repoInfo.url);
|
||||
} else if (errorMessage.find("fatal: Needed a single revision") != std::string::npos) {
|
||||
// indicates that the repo does not have any commits
|
||||
// we want to proceed and will consider it dirty later
|
||||
} else if (exitCode != 0) {
|
||||
// any other errors should lead to a failure
|
||||
throw Error("getting the HEAD of the Git tree '%s' failed with exit code %d:\n%s", repoInfo.url, exitCode, errorMessage);
|
||||
}
|
||||
|
||||
repoInfo.hasHead = exitCode == 0;
|
||||
|
||||
try {
|
||||
if (repoInfo.hasHead) {
|
||||
// Using git diff is preferrable over lower-level operations here,
|
||||
// because it's conceptually simpler and we only need the exit code anyways.
|
||||
auto gitDiffOpts = Strings({ "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "diff", "HEAD", "--quiet"});
|
||||
if (!repoInfo.submodules) {
|
||||
// Changes in submodules should only make the tree dirty
|
||||
// when those submodules will be copied as well.
|
||||
gitDiffOpts.emplace_back("--ignore-submodules");
|
||||
}
|
||||
gitDiffOpts.emplace_back("--");
|
||||
runProgram("git", true, gitDiffOpts);
|
||||
|
||||
repoInfo.isDirty = false;
|
||||
}
|
||||
} catch (ExecError & e) {
|
||||
if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
|
||||
}
|
||||
}
|
||||
|
||||
return repoInfo;
|
||||
}
|
||||
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
std::set<CanonPath> listFiles(const RepoInfo & repoInfo) const
|
||||
{
|
||||
Input input(_input);
|
||||
auto gitDir = ".git";
|
||||
auto gitOpts = Strings({ "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "ls-files", "-z" });
|
||||
if (repoInfo.submodules)
|
||||
gitOpts.emplace_back("--recurse-submodules");
|
||||
|
||||
std::set<CanonPath> res;
|
||||
|
||||
for (auto & p : tokenizeString<std::set<std::string>>(
|
||||
runProgram("git", true, gitOpts), "\0"s))
|
||||
res.insert(CanonPath(p));
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
Hash updateRev(Input & input, const RepoInfo & repoInfo, const std::string & ref) const
|
||||
{
|
||||
if (auto r = input.getRev())
|
||||
return *r;
|
||||
else {
|
||||
auto rev = Hash::parseAny(chomp(runProgram("git", true, { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "rev-parse", ref })), htSHA1);
|
||||
input.attrs.insert_or_assign("rev", rev.gitRev());
|
||||
return rev;
|
||||
}
|
||||
}
|
||||
|
||||
uint64_t getLastModified(const RepoInfo & repoInfo, const std::string & repoDir, const std::string & ref) const
|
||||
{
|
||||
return
|
||||
repoInfo.hasHead
|
||||
? std::stoull(
|
||||
runProgram("git", true,
|
||||
{ "-C", repoDir, "--git-dir", repoInfo.gitDir, "log", "-1", "--format=%ct", "--no-show-signature", ref }))
|
||||
: 0;
|
||||
}
|
||||
|
||||
uint64_t getLastModified(const RepoInfo & repoInfo, const std::string & repoDir, const Hash & rev) const
|
||||
{
|
||||
if (!repoInfo.hasHead) return 0;
|
||||
|
||||
auto key = fmt("git-%s-last-modified", rev.gitRev());
|
||||
|
||||
auto cache = getCache();
|
||||
|
||||
if (auto lastModifiedS = cache->queryFact(key)) {
|
||||
if (auto lastModified = string2Int<uint64_t>(*lastModifiedS))
|
||||
return *lastModified;
|
||||
}
|
||||
|
||||
auto lastModified = getLastModified(repoInfo, repoDir, rev.gitRev());
|
||||
|
||||
cache->upsertFact(key, std::to_string(lastModified));
|
||||
|
||||
return lastModified;
|
||||
}
|
||||
|
||||
    uint64_t getRevCount(const RepoInfo & repoInfo, const std::string & repoDir, const Hash & rev) const
    {
        if (!repoInfo.hasHead) return 0;

        auto key = fmt("git-%s-revcount", rev.gitRev());

        auto cache = getCache();

        if (auto revCountS = cache->queryFact(key)) {
            if (auto revCount = string2Int<uint64_t>(*revCountS))
                return *revCount;
        }

        Activity act(*logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.url));

        auto revCount = std::stoull(
            runProgram("git", true,
                { "-C", repoDir, "--git-dir", repoInfo.gitDir, "rev-list", "--count", rev.gitRev() }));

        cache->upsertFact(key, std::to_string(revCount));

        return revCount;
    }
|
||||
|
||||
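
getLastModified() and getRevCount() above share the same query-fact / compute / upsert-fact shape. A possible way to factor that out (not part of this commit); it assumes the surrounding file's getCache(), queryFact(), upsertFact() and string2Int() exactly as they are used above:

    template<typename Compute>
    uint64_t cachedFact(const std::string & key, Compute compute)
    {
        auto cache = getCache();

        if (auto s = cache->queryFact(key))
            if (auto n = string2Int<uint64_t>(*s))
                return *n;

        auto value = compute();
        cache->upsertFact(key, std::to_string(value));
        return value;
    }

    // getRevCount() could then reduce to:
    //   return cachedFact(fmt("git-%s-revcount", rev.gitRev()), [&]() {
    //       return std::stoull(runProgram("git", true,
    //           { "-C", repoDir, "--git-dir", repoInfo.gitDir, "rev-list", "--count", rev.gitRev() }));
    //   });
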
std::string getDefaultRef(const RepoInfo & repoInfo) const
|
||||
{
|
||||
auto head = repoInfo.isLocal
|
||||
? readHead(repoInfo.url)
|
||||
: readHeadCached(repoInfo.url);
|
||||
if (!head) {
|
||||
warn("could not read HEAD ref from repo at '%s', using 'master'", repoInfo.url);
|
||||
return "master";
|
||||
}
|
||||
return *head;
|
||||
}
|
||||
|
||||
StorePath fetchToStore(
|
||||
ref<Store> store,
|
||||
RepoInfo & repoInfo,
|
||||
Input & input) const
|
||||
{
|
||||
assert(!repoInfo.isDirty);
|
||||
|
||||
auto origRev = input.getRev();
|
||||
|
||||
std::string name = input.getName();
|
||||
|
||||
bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
|
||||
bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
|
||||
bool allRefs = maybeGetBoolAttr(input.attrs, "allRefs").value_or(false);
|
||||
|
||||
std::string cacheType = "git";
|
||||
if (shallow) cacheType += "-shallow";
|
||||
if (submodules) cacheType += "-submodules";
|
||||
if (allRefs) cacheType += "-all-refs";
|
||||
|
||||
auto checkHashType = [&](const std::optional<Hash> & hash)
|
||||
{
|
||||
if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
|
||||
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(Base16, true));
|
||||
};
|
||||
|
||||
auto getLockedAttrs = [&]()
|
||||
{
|
||||
checkHashType(input.getRev());
|
||||
|
||||
return Attrs({
|
||||
{"type", cacheType},
|
||||
{"type", repoInfo.cacheType},
|
||||
{"name", name},
|
||||
{"rev", input.getRev()->gitRev()},
|
||||
});
|
||||
};
|
||||
|
||||
auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
|
||||
-> std::pair<StorePath, Input>
|
||||
auto makeResult = [&](const Attrs & infoAttrs, const StorePath & storePath) -> StorePath
|
||||
{
|
||||
assert(input.getRev());
|
||||
assert(!_input.getRev() || _input.getRev() == input.getRev());
|
||||
if (!shallow)
|
||||
assert(!origRev || origRev == input.getRev());
|
||||
if (!repoInfo.shallow)
|
||||
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
|
||||
input.attrs.insert_or_assign("lastModified", getIntAttr(infoAttrs, "lastModified"));
|
||||
return {std::move(storePath), input};
|
||||
|
||||
// FIXME: remove?
|
||||
//input.attrs.erase("narHash");
|
||||
auto narHash = store->queryPathInfo(storePath)->narHash;
|
||||
input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
|
||||
|
||||
return storePath;
|
||||
};
|
||||
|
||||
if (input.getRev()) {
|
||||
|
@ -440,54 +525,23 @@ struct GitInputScheme : InputScheme
|
|||
return makeResult(res->first, std::move(res->second));
|
||||
}
|
||||
|
||||
auto [isLocal, actualUrl_] = getActualUrl(input);
|
||||
auto actualUrl = actualUrl_; // work around clang bug
|
||||
|
||||
/* If this is a local directory and no ref or revision is given,
|
||||
allow fetching directly from a dirty workdir. */
|
||||
if (!input.getRef() && !input.getRev() && isLocal) {
|
||||
auto workdirInfo = getWorkdirInfo(input, actualUrl);
|
||||
if (!workdirInfo.clean) {
|
||||
return fetchFromWorkdir(store, input, actualUrl, workdirInfo);
|
||||
}
|
||||
}
|
||||
auto originalRef = input.getRef();
|
||||
auto ref = originalRef ? *originalRef : getDefaultRef(repoInfo);
|
||||
input.attrs.insert_or_assign("ref", ref);
|
||||
|
||||
Attrs unlockedAttrs({
|
||||
{"type", cacheType},
|
||||
{"type", repoInfo.cacheType},
|
||||
{"name", name},
|
||||
{"url", actualUrl},
|
||||
{"url", repoInfo.url},
|
||||
{"ref", ref},
|
||||
});
|
||||
|
||||
Path repoDir;
|
||||
|
||||
if (isLocal) {
|
||||
if (!input.getRef()) {
|
||||
auto head = readHead(actualUrl);
|
||||
if (!head) {
|
||||
warn("could not read HEAD ref from repo at '%s', using 'master'", actualUrl);
|
||||
head = "master";
|
||||
}
|
||||
input.attrs.insert_or_assign("ref", *head);
|
||||
unlockedAttrs.insert_or_assign("ref", *head);
|
||||
}
|
||||
|
||||
if (!input.getRev())
|
||||
input.attrs.insert_or_assign("rev",
|
||||
Hash::parseAny(chomp(runProgram("git", true, { "-C", actualUrl, "--git-dir", gitDir, "rev-parse", *input.getRef() })), htSHA1).gitRev());
|
||||
|
||||
repoDir = actualUrl;
|
||||
if (repoInfo.isLocal) {
|
||||
updateRev(input, repoInfo, ref);
|
||||
repoDir = repoInfo.url;
|
||||
} else {
|
||||
const bool useHeadRef = !input.getRef();
|
||||
if (useHeadRef) {
|
||||
auto head = readHeadCached(actualUrl);
|
||||
if (!head) {
|
||||
warn("could not read HEAD ref from repo at '%s', using 'master'", actualUrl);
|
||||
head = "master";
|
||||
}
|
||||
input.attrs.insert_or_assign("ref", *head);
|
||||
unlockedAttrs.insert_or_assign("ref", *head);
|
||||
}
|
||||
|
||||
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
|
||||
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
|
||||
if (!input.getRev() || input.getRev() == rev2) {
|
||||
|
@ -496,9 +550,9 @@ struct GitInputScheme : InputScheme
|
|||
}
|
||||
}
|
||||
|
||||
Path cacheDir = getCachePath(actualUrl);
|
||||
Path cacheDir = getCachePath(repoInfo.url);
|
||||
repoDir = cacheDir;
|
||||
gitDir = ".";
|
||||
repoInfo.gitDir = ".";
|
||||
|
||||
createDirs(dirOf(cacheDir));
|
||||
PathLocks cacheDirLock({cacheDir + ".lock"});
|
||||
|
@ -508,9 +562,9 @@ struct GitInputScheme : InputScheme
|
|||
}
|
||||
|
||||
Path localRefFile =
|
||||
input.getRef()->compare(0, 5, "refs/") == 0
|
||||
? cacheDir + "/" + *input.getRef()
|
||||
: cacheDir + "/refs/heads/" + *input.getRef();
|
||||
ref.compare(0, 5, "refs/") == 0
|
||||
? cacheDir + "/" + ref
|
||||
: cacheDir + "/refs/heads/" + ref;
|
||||
|
||||
bool doFetch;
|
||||
time_t now = time(0);
|
||||
|
@ -519,7 +573,7 @@ struct GitInputScheme : InputScheme
|
|||
repo. */
|
||||
if (input.getRev()) {
|
||||
try {
|
||||
runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "cat-file", "-e", input.getRev()->gitRev() });
|
||||
runProgram("git", true, { "-C", repoDir, "--git-dir", repoInfo.gitDir, "cat-file", "-e", input.getRev()->gitRev() });
|
||||
doFetch = false;
|
||||
} catch (ExecError & e) {
|
||||
if (WIFEXITED(e.status)) {
|
||||
|
@ -529,7 +583,7 @@ struct GitInputScheme : InputScheme
|
|||
}
|
||||
}
|
||||
} else {
|
||||
if (allRefs) {
|
||||
if (repoInfo.allRefs) {
|
||||
doFetch = true;
|
||||
} else {
|
||||
/* If the local ref is older than ‘tarball-ttl’ seconds, do a
|
||||
|
@ -541,29 +595,37 @@ struct GitInputScheme : InputScheme
|
|||
}
|
||||
|
||||
if (doFetch) {
|
||||
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", actualUrl));
|
||||
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", repoInfo.url));
|
||||
|
||||
// FIXME: git stderr messes up our progress indicator, so
|
||||
// we're using --quiet for now. Should process its stderr.
|
||||
try {
|
||||
auto ref = input.getRef();
|
||||
auto fetchRef = allRefs
|
||||
auto fetchRef = repoInfo.allRefs
|
||||
? "refs/*"
|
||||
: ref->compare(0, 5, "refs/") == 0
|
||||
? *ref
|
||||
: ref.compare(0, 5, "refs/") == 0
|
||||
? ref
|
||||
: ref == "HEAD"
|
||||
? *ref
|
||||
: "refs/heads/" + *ref;
|
||||
runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) });
|
||||
? ref
|
||||
: "refs/heads/" + ref;
|
||||
runProgram("git", true,
|
||||
{ "-C", repoDir,
|
||||
"--git-dir", repoInfo.gitDir,
|
||||
"fetch",
|
||||
"--quiet",
|
||||
"--force",
|
||||
"--",
|
||||
repoInfo.url,
|
||||
fmt("%s:%s", fetchRef, fetchRef)
|
||||
});
|
||||
} catch (Error & e) {
|
||||
if (!pathExists(localRefFile)) throw;
|
||||
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl);
|
||||
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", repoInfo.url);
|
||||
}
|
||||
|
||||
if (!touchCacheFile(localRefFile, now))
|
||||
warn("could not update mtime for file '%s': %s", localRefFile, strerror(errno));
|
||||
if (useHeadRef && !storeCachedHead(actualUrl, *input.getRef()))
|
||||
warn("could not update cached head '%s' for '%s'", *input.getRef(), actualUrl);
|
||||
if (!originalRef && !storeCachedHead(repoInfo.url, ref))
|
||||
warn("could not update cached head '%s' for '%s'", ref, repoInfo.url);
|
||||
}
|
||||
|
||||
if (!input.getRev())
|
||||
|
@ -572,14 +634,14 @@ struct GitInputScheme : InputScheme
|
|||
// cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
|
||||
}
|
||||
|
||||
bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-parse", "--is-shallow-repository" })) == "true";
|
||||
bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "--git-dir", repoInfo.gitDir, "rev-parse", "--is-shallow-repository" })) == "true";
|
||||
|
||||
if (isShallow && !shallow)
|
||||
throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified.", actualUrl);
|
||||
if (isShallow && !repoInfo.shallow)
|
||||
throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified", repoInfo.url);
|
||||
|
||||
// FIXME: check whether rev is an ancestor of ref.
|
||||
|
||||
printTalkative("using revision %s of repo '%s'", input.getRev()->gitRev(), actualUrl);
|
||||
printTalkative("using revision %s of repo '%s'", input.getRev()->gitRev(), repoInfo.url);
|
||||
|
||||
/* Now that we know the ref, check again whether we have it in
|
||||
the store. */
|
||||
|
@ -592,7 +654,7 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
auto result = runProgram(RunOptions {
|
||||
.program = "git",
|
||||
.args = { "-C", repoDir, "--git-dir", gitDir, "cat-file", "commit", input.getRev()->gitRev() },
|
||||
.args = { "-C", repoDir, "--git-dir", repoInfo.gitDir, "cat-file", "commit", input.getRev()->gitRev() },
|
||||
.mergeStderrToStdout = true
|
||||
});
|
||||
if (WEXITSTATUS(result.first) == 128
|
||||
|
@ -600,16 +662,18 @@ struct GitInputScheme : InputScheme
|
|||
{
|
||||
throw Error(
|
||||
"Cannot find Git revision '%s' in ref '%s' of repository '%s'! "
|
||||
"Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the "
|
||||
ANSI_BOLD "ref" ANSI_NORMAL " you've specified or add " ANSI_BOLD
|
||||
"allRefs = true;" ANSI_NORMAL " to " ANSI_BOLD "fetchGit" ANSI_NORMAL ".",
|
||||
"Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the "
|
||||
ANSI_BOLD "ref" ANSI_NORMAL " you've specified or add " ANSI_BOLD
|
||||
"allRefs = true;" ANSI_NORMAL " to " ANSI_BOLD "fetchGit" ANSI_NORMAL ".",
|
||||
input.getRev()->gitRev(),
|
||||
*input.getRef(),
|
||||
actualUrl
|
||||
ref,
|
||||
repoInfo.url
|
||||
);
|
||||
}
|
||||
|
||||
if (submodules) {
|
||||
Activity act(*logger, lvlChatty, actUnknown, fmt("copying Git tree '%s' to the store", input.to_string()));
|
||||
|
||||
if (repoInfo.submodules) {
|
||||
Path tmpGitDir = createTempDir();
|
||||
AutoDelete delTmpGitDir(tmpGitDir, true);
|
||||
|
||||
|
@ -621,7 +685,7 @@ struct GitInputScheme : InputScheme
|
|||
"--update-head-ok", "--", repoDir, "refs/*:refs/*" });
|
||||
|
||||
runProgram("git", true, { "-C", tmpDir, "checkout", "--quiet", input.getRev()->gitRev() });
|
||||
runProgram("git", true, { "-C", tmpDir, "remote", "add", "origin", actualUrl });
|
||||
runProgram("git", true, { "-C", tmpDir, "remote", "add", "origin", repoInfo.url });
|
||||
runProgram("git", true, { "-C", tmpDir, "submodule", "--quiet", "update", "--init", "--recursive" });
|
||||
|
||||
filter = isNotDotGitDirectory;
|
||||
|
@ -631,7 +695,7 @@ struct GitInputScheme : InputScheme
|
|||
auto source = sinkToSource([&](Sink & sink) {
|
||||
runProgram2({
|
||||
.program = "git",
|
||||
.args = { "-C", repoDir, "--git-dir", gitDir, "archive", input.getRev()->gitRev() },
|
||||
.args = { "-C", repoDir, "--git-dir", repoInfo.gitDir, "archive", input.getRev()->gitRev() },
|
||||
.standardOut = &sink
|
||||
});
|
||||
});
|
||||
|
@ -641,18 +705,18 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
|
||||
|
||||
auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() }));
|
||||
auto rev = *input.getRev();
|
||||
|
||||
Attrs infoAttrs({
|
||||
{"rev", input.getRev()->gitRev()},
|
||||
{"lastModified", lastModified},
|
||||
{"rev", rev.gitRev()},
|
||||
{"lastModified", getLastModified(repoInfo, repoDir, rev)},
|
||||
});
|
||||
|
||||
if (!shallow)
|
||||
if (!repoInfo.shallow)
|
||||
infoAttrs.insert_or_assign("revCount",
|
||||
std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-list", "--count", input.getRev()->gitRev() })));
|
||||
getRevCount(repoInfo, repoDir, rev));
|
||||
|
||||
if (!_input.getRev())
|
||||
if (!origRev)
|
||||
getCache()->add(
|
||||
store,
|
||||
unlockedAttrs,
|
||||
|
@ -669,6 +733,70 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
return makeResult(infoAttrs, std::move(storePath));
|
||||
}
|
||||
|
||||
    std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
    {
        Input input(_input);

        auto repoInfo = getRepoInfo(input);

        auto makeNotAllowedError = [url{repoInfo.url}](const CanonPath & path) -> RestrictedPathError
        {
            if (nix::pathExists(path.abs()))
                return RestrictedPathError("access to path '%s' is forbidden because it is not under Git control; maybe you should 'git add' it to the repository '%s'?", path, url);
            else
                return RestrictedPathError("path '%s' does not exist in Git repository '%s'", path, url);
        };

        /* Unless we're using the working tree, copy the tree into the
           Nix store. TODO: We could have an accessor for fetching
           files from the Git repository directly. */
        if (input.getRef() || input.getRev() || !repoInfo.isLocal) {
            auto storePath = fetchToStore(store, repoInfo, input);
            auto accessor = makeStorePathAccessor(store, storePath, std::move(makeNotAllowedError));
            accessor->setPathDisplay("«" + input.to_string() + "»");
            return {accessor, input};
        }

        if (!repoInfo.isDirty) {
            auto ref = getDefaultRef(repoInfo);
            input.attrs.insert_or_assign("ref", ref);

            auto rev = updateRev(input, repoInfo, ref);

            input.attrs.insert_or_assign(
                "revCount",
                getRevCount(repoInfo, repoInfo.url, rev));

            input.attrs.insert_or_assign(
                "lastModified",
                getLastModified(repoInfo, repoInfo.url, rev));
        } else {
            repoInfo.warnDirty();

            // FIXME: maybe we should use the timestamp of the last
            // modified dirty file?
            input.attrs.insert_or_assign(
                "lastModified",
                getLastModified(repoInfo, repoInfo.url, "HEAD"));
        }

        return {makeFSInputAccessor(CanonPath(repoInfo.url), listFiles(repoInfo), std::move(makeNotAllowedError)), input};
    }
|
||||
|
||||
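
What the branches above amount to for a plain local checkout (no ref, no rev, possibly dirty): the working tree is served directly through an FSInputAccessor restricted to the `git ls-files` output, so nothing is copied to the store. A sketch from the caller's side; the repository path is invented:

    #include "fetchers.hh"
    #include "input-accessor.hh"
    #include "store-api.hh"

    using namespace nix;
    using namespace nix::fetchers;

    void exampleDirtyCheckout(ref<Store> store)
    {
        auto input = Input::fromURL("git+file:///home/alice/my-flake");

        // For a local checkout with no "ref" or "rev" attribute, getAccessor()
        // returns an FSInputAccessor rooted at the working tree whose allow-list
        // is the output of `git ls-files`: untracked files stay invisible to
        // evaluation, and tracked files are read lazily in place.
        auto [accessor, locked] = input.getAccessor(store);

        auto flakeNix = accessor->readFile(CanonPath("flake.nix"));
    }
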
bool isLocked(const Input & input) const override
|
||||
{
|
||||
return (bool) input.getRev();
|
||||
}
|
||||
|
||||
std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const override
|
||||
{
|
||||
if (auto rev = input.getRev()) {
|
||||
return fmt("%s;%s", rev->gitRev(), getSubmodulesAttr(input) ? "1" : "0");
|
||||
} else
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
static auto rGitInputScheme = OnStartup([] { registerInputScheme(std::make_unique<GitInputScheme>()); });
|
||||
|
|
|
@ -7,6 +7,8 @@
|
|||
#include "git.hh"
|
||||
#include "fetchers.hh"
|
||||
#include "fetch-settings.hh"
|
||||
#include "input-accessor.hh"
|
||||
#include "tarball.hh"
|
||||
|
||||
#include <optional>
|
||||
#include <nlohmann/json.hpp>
|
||||
|
@ -26,11 +28,11 @@ std::regex hostRegex(hostRegexS, std::regex::ECMAScript);
|
|||
|
||||
struct GitArchiveInputScheme : InputScheme
|
||||
{
|
||||
virtual std::string type() = 0;
|
||||
virtual std::string type() const = 0;
|
||||
|
||||
virtual std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const = 0;
|
||||
|
||||
std::optional<Input> inputFromURL(const ParsedURL & url) override
|
||||
std::optional<Input> inputFromURL(const ParsedURL & url) const override
|
||||
{
|
||||
if (url.scheme != type()) return {};
|
||||
|
||||
|
@ -100,7 +102,7 @@ struct GitArchiveInputScheme : InputScheme
|
|||
return input;
|
||||
}
|
||||
|
||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) override
|
||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
||||
{
|
||||
if (maybeGetStrAttr(attrs, "type") != type()) return {};
|
||||
|
||||
|
@ -116,7 +118,7 @@ struct GitArchiveInputScheme : InputScheme
|
|||
return input;
|
||||
}
|
||||
|
||||
ParsedURL toURL(const Input & input) override
|
||||
ParsedURL toURL(const Input & input) const override
|
||||
{
|
||||
auto owner = getStrAttr(input.attrs, "owner");
|
||||
auto repo = getStrAttr(input.attrs, "repo");
|
||||
|
@ -132,15 +134,10 @@ struct GitArchiveInputScheme : InputScheme
|
|||
};
|
||||
}
|
||||
|
||||
bool hasAllInfo(const Input & input) override
|
||||
{
|
||||
return input.getRev() && maybeGetIntAttr(input.attrs, "lastModified");
|
||||
}
|
||||
|
||||
Input applyOverrides(
|
||||
const Input & _input,
|
||||
std::optional<std::string> ref,
|
||||
std::optional<Hash> rev) override
|
||||
std::optional<Hash> rev) const override
|
||||
{
|
||||
auto input(_input);
|
||||
if (rev && ref)
|
||||
|
@ -183,10 +180,8 @@ struct GitArchiveInputScheme : InputScheme
|
|||
|
||||
virtual DownloadUrl getDownloadUrl(const Input & input) const = 0;
|
||||
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
std::pair<StorePath, Input> downloadArchive(ref<Store> store, Input input) const
|
||||
{
|
||||
Input input(_input);
|
||||
|
||||
if (!maybeGetStrAttr(input.attrs, "ref")) input.attrs.insert_or_assign("ref", "HEAD");
|
||||
|
||||
auto rev = input.getRev();
|
||||
|
@ -196,38 +191,53 @@ struct GitArchiveInputScheme : InputScheme
|
|||
input.attrs.insert_or_assign("rev", rev->gitRev());
|
||||
|
||||
Attrs lockedAttrs({
|
||||
{"type", "git-tarball"},
|
||||
{"type", "git-zipball"},
|
||||
{"rev", rev->gitRev()},
|
||||
});
|
||||
|
||||
if (auto res = getCache()->lookup(store, lockedAttrs)) {
|
||||
input.attrs.insert_or_assign("lastModified", getIntAttr(res->first, "lastModified"));
|
||||
return {std::move(res->second), input};
|
||||
}
|
||||
if (auto res = getCache()->lookup(store, lockedAttrs))
|
||||
return {std::move(res->second), std::move(input)};
|
||||
|
||||
auto url = getDownloadUrl(input);
|
||||
|
||||
auto [tree, lastModified] = downloadTarball(store, url.url, input.getName(), true, url.headers);
|
||||
|
||||
input.attrs.insert_or_assign("lastModified", uint64_t(lastModified));
|
||||
auto res = downloadFile(store, url.url, input.getName(), true, url.headers);
|
||||
|
||||
getCache()->add(
|
||||
store,
|
||||
lockedAttrs,
|
||||
{
|
||||
{"rev", rev->gitRev()},
|
||||
{"lastModified", uint64_t(lastModified)}
|
||||
},
|
||||
tree.storePath,
|
||||
res.storePath,
|
||||
true);
|
||||
|
||||
return {std::move(tree.storePath), input};
|
||||
return {res.storePath, std::move(input)};
|
||||
}
|
||||
|
||||
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const override
|
||||
{
|
||||
auto [storePath, input2] = downloadArchive(store, input);
|
||||
|
||||
auto accessor = makeZipInputAccessor(CanonPath(store->toRealPath(storePath)));
|
||||
|
||||
auto lastModified = accessor->getLastModified();
|
||||
assert(lastModified);
|
||||
input2.attrs.insert_or_assign("lastModified", uint64_t(*lastModified));
|
||||
|
||||
accessor->setPathDisplay("«" + input2.to_string() + "»");
|
||||
|
||||
return {accessor, input2};
|
||||
}
|
||||
|
||||
bool isLocked(const Input & input) const override
|
||||
{
|
||||
return (bool) input.getRev();
|
||||
}
|
||||
};
|
||||
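
The scheme now asks the forge for a zipball and hands it to makeZipInputAccessor() instead of unpacking a tarball into the store. The point of zip here is its central directory: a single member can be located and read without decompressing the rest, which is what makes a lazy accessor over the archive practical. A standalone illustration using libzip; this is not the implementation of makeZipInputAccessor, which this hunk does not show:

    #include <zip.h>
    #include <stdexcept>
    #include <string>

    std::string readZipMember(const char * zipPath, const char * memberName)
    {
        int err = 0;
        zip_t * za = zip_open(zipPath, ZIP_RDONLY, &err);
        if (!za) throw std::runtime_error("cannot open zip file");

        zip_stat_t st;
        if (zip_stat(za, memberName, 0, &st) != 0) {
            zip_close(za);
            throw std::runtime_error("member not found");
        }

        // Only this member is located (via the central directory) and inflated.
        zip_file_t * zf = zip_fopen(za, memberName, 0);
        std::string data(st.size, '\0');
        if (zf) {
            zip_fread(zf, data.data(), st.size);
            zip_fclose(zf);
        }
        zip_close(za);
        return data;
    }
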
|
||||
struct GitHubInputScheme : GitArchiveInputScheme
|
||||
{
|
||||
std::string type() override { return "github"; }
|
||||
std::string type() const override { return "github"; }
|
||||
|
||||
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
|
||||
{
|
||||
|
@ -240,14 +250,29 @@ struct GitHubInputScheme : GitArchiveInputScheme
|
|||
return std::pair<std::string, std::string>("Authorization", fmt("token %s", token));
|
||||
}
|
||||
|
||||
std::string getHost(const Input & input) const
|
||||
{
|
||||
return maybeGetStrAttr(input.attrs, "host").value_or("github.com");
|
||||
}
|
||||
|
||||
std::string getOwner(const Input & input) const
|
||||
{
|
||||
return getStrAttr(input.attrs, "owner");
|
||||
}
|
||||
|
||||
std::string getRepo(const Input & input) const
|
||||
{
|
||||
return getStrAttr(input.attrs, "repo");
|
||||
}
|
||||
|
||||
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
|
||||
{
|
||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
|
||||
auto host = getHost(input);
|
||||
auto url = fmt(
|
||||
host == "github.com"
|
||||
? "https://api.%s/repos/%s/%s/commits/%s"
|
||||
: "https://%s/api/v3/repos/%s/%s/commits/%s",
|
||||
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
|
||||
host, getOwner(input), getRepo(input), *input.getRef());
|
||||
|
||||
Headers headers = makeHeadersWithAuthTokens(host);
|
||||
|
||||
|
@ -264,31 +289,42 @@ struct GitHubInputScheme : GitArchiveInputScheme
|
|||
{
|
||||
// FIXME: use regular /archive URLs instead? api.github.com
|
||||
// might have stricter rate limits.
|
||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
|
||||
auto host = getHost(input);
|
||||
auto url = fmt(
|
||||
host == "github.com"
|
||||
? "https://api.%s/repos/%s/%s/tarball/%s"
|
||||
: "https://%s/api/v3/repos/%s/%s/tarball/%s",
|
||||
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
|
||||
? "https://api.%s/repos/%s/%s/zipball/%s"
|
||||
: "https://%s/api/v3/repos/%s/%s/zipball/%s",
|
||||
host, getOwner(input), getRepo(input),
|
||||
input.getRev()->to_string(Base16, false));
|
||||
|
||||
Headers headers = makeHeadersWithAuthTokens(host);
|
||||
return DownloadUrl { url, headers };
|
||||
}
|
||||
|
||||
void clone(const Input & input, const Path & destDir) override
|
||||
void clone(const Input & input, const Path & destDir) const override
|
||||
{
|
||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
|
||||
auto host = getHost(input);
|
||||
Input::fromURL(fmt("git+https://%s/%s/%s.git",
|
||||
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
|
||||
host, getOwner(input), getRepo(input)))
|
||||
.applyOverrides(input.getRef(), input.getRev())
|
||||
.clone(destDir);
|
||||
}
|
||||
|
||||
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
|
||||
{
|
||||
auto [accessor, input] = GitArchiveInputScheme::getAccessor(store, _input);
|
||||
if (getHost(input) == "github.com")
|
||||
accessor->setPathDisplay(fmt("https://github.com/%s/%s/blob/%s",
|
||||
getOwner(input),
|
||||
getRepo(input),
|
||||
input.getRev()->to_string(Base16, false)));
|
||||
return {accessor, input};
|
||||
}
|
||||
};
|
||||
|
||||
struct GitLabInputScheme : GitArchiveInputScheme
|
||||
{
|
||||
std::string type() override { return "gitlab"; }
|
||||
std::string type() const override { return "gitlab"; }
|
||||
|
||||
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
|
||||
{
|
||||
|
@ -335,7 +371,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
|
|||
// is 10 reqs/sec/ip-addr. See
|
||||
// https://docs.gitlab.com/ee/user/gitlab_com/index.html#gitlabcom-specific-rate-limits
|
||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
|
||||
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s",
|
||||
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.zip?sha=%s",
|
||||
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
|
||||
input.getRev()->to_string(Base16, false));
|
||||
|
||||
|
@ -343,7 +379,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
|
|||
return DownloadUrl { url, headers };
|
||||
}
|
||||
|
||||
void clone(const Input & input, const Path & destDir) override
|
||||
void clone(const Input & input, const Path & destDir) const override
|
||||
{
|
||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
|
||||
// FIXME: get username somewhere
|
||||
|
@ -356,7 +392,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
|
|||
|
||||
struct SourceHutInputScheme : GitArchiveInputScheme
|
||||
{
|
||||
std::string type() override { return "sourcehut"; }
|
||||
std::string type() const override { return "sourcehut"; }
|
||||
|
||||
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
|
||||
{
|
||||
|
@ -430,7 +466,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
|
|||
return DownloadUrl { url, headers };
|
||||
}
|
||||
|
||||
void clone(const Input & input, const Path & destDir) override
|
||||
void clone(const Input & input, const Path & destDir) const override
|
||||
{
|
||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
|
||||
Input::fromURL(fmt("git+https://%s/%s/%s",
|
||||
|
|
|
@ -7,7 +7,7 @@ std::regex flakeRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
|
|||
|
||||
struct IndirectInputScheme : InputScheme
|
||||
{
|
||||
std::optional<Input> inputFromURL(const ParsedURL & url) override
|
||||
std::optional<Input> inputFromURL(const ParsedURL & url) const override
|
||||
{
|
||||
if (url.scheme != "flake") return {};
|
||||
|
||||
|
@ -41,7 +41,6 @@ struct IndirectInputScheme : InputScheme
|
|||
// FIXME: forbid query params?
|
||||
|
||||
Input input;
|
||||
input.direct = false;
|
||||
input.attrs.insert_or_assign("type", "indirect");
|
||||
input.attrs.insert_or_assign("id", id);
|
||||
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
|
||||
|
@ -50,7 +49,7 @@ struct IndirectInputScheme : InputScheme
|
|||
return input;
|
||||
}
|
||||
|
||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) override
|
||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
||||
{
|
||||
if (maybeGetStrAttr(attrs, "type") != "indirect") return {};
|
||||
|
||||
|
@ -63,12 +62,11 @@ struct IndirectInputScheme : InputScheme
|
|||
throw BadURL("'%s' is not a valid flake ID", id);
|
||||
|
||||
Input input;
|
||||
input.direct = false;
|
||||
input.attrs = attrs;
|
||||
return input;
|
||||
}
|
||||
|
||||
ParsedURL toURL(const Input & input) override
|
||||
ParsedURL toURL(const Input & input) const override
|
||||
{
|
||||
ParsedURL url;
|
||||
url.scheme = "flake";
|
||||
|
@ -78,15 +76,10 @@ struct IndirectInputScheme : InputScheme
|
|||
return url;
|
||||
}
|
||||
|
||||
bool hasAllInfo(const Input & input) override
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
Input applyOverrides(
|
||||
const Input & _input,
|
||||
std::optional<std::string> ref,
|
||||
std::optional<Hash> rev) override
|
||||
std::optional<Hash> rev) const override
|
||||
{
|
||||
auto input(_input);
|
||||
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
|
||||
|
@ -94,10 +87,13 @@ struct IndirectInputScheme : InputScheme
|
|||
return input;
|
||||
}
|
||||
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
|
||||
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const override
|
||||
{
|
||||
throw Error("indirect input '%s' cannot be fetched directly", input.to_string());
|
||||
}
|
||||
|
||||
bool isDirect(const Input & input) const override
|
||||
{ return false; }
|
||||
};
|
||||
|
||||
static auto rIndirectInputScheme = OnStartup([] { registerInputScheme(std::make_unique<IndirectInputScheme>()); });
|
||||
|
|
src/libfetchers/input-accessor.cc (new file, 260 lines)
@@ -0,0 +1,260 @@
#include "input-accessor.hh"
|
||||
#include "util.hh"
|
||||
#include "store-api.hh"
|
||||
#include "cache.hh"
|
||||
|
||||
#include <atomic>
|
||||
|
||||
namespace nix {
|
||||
|
||||
static std::atomic<size_t> nextNumber{0};
|
||||
|
||||
InputAccessor::InputAccessor()
|
||||
: number(++nextNumber)
|
||||
, displayPrefix{"«unknown»"}
|
||||
{
|
||||
}
|
||||
|
||||
// FIXME: merge with archive.cc.
|
||||
void InputAccessor::dumpPath(
|
||||
const CanonPath & path,
|
||||
Sink & sink,
|
||||
PathFilter & filter)
|
||||
{
|
||||
auto dumpContents = [&](const CanonPath & path)
|
||||
{
|
||||
// FIXME: pipe
|
||||
auto s = readFile(path);
|
||||
sink << "contents" << s.size();
|
||||
sink(s);
|
||||
writePadding(s.size(), sink);
|
||||
};
|
||||
|
||||
std::function<void(const CanonPath & path)> dump;
|
||||
|
||||
dump = [&](const CanonPath & path) {
|
||||
checkInterrupt();
|
||||
|
||||
auto st = lstat(path);
|
||||
|
||||
sink << "(";
|
||||
|
||||
if (st.type == tRegular) {
|
||||
sink << "type" << "regular";
|
||||
if (st.isExecutable)
|
||||
sink << "executable" << "";
|
||||
dumpContents(path);
|
||||
}
|
||||
|
||||
else if (st.type == tDirectory) {
|
||||
sink << "type" << "directory";
|
||||
|
||||
/* If we're on a case-insensitive system like macOS, undo
|
||||
the case hack applied by restorePath(). */
|
||||
std::map<std::string, std::string> unhacked;
|
||||
for (auto & i : readDirectory(path))
|
||||
if (/* archiveSettings.useCaseHack */ false) { // FIXME
|
||||
std::string name(i.first);
|
||||
size_t pos = i.first.find(caseHackSuffix);
|
||||
if (pos != std::string::npos) {
|
||||
debug("removing case hack suffix from '%s'", path + i.first);
|
||||
name.erase(pos);
|
||||
}
|
||||
if (!unhacked.emplace(name, i.first).second)
|
||||
throw Error("file name collision in between '%s' and '%s'",
|
||||
(path + unhacked[name]),
|
||||
(path + i.first));
|
||||
} else
|
||||
unhacked.emplace(i.first, i.first);
|
||||
|
||||
for (auto & i : unhacked)
|
||||
if (filter((path + i.first).abs())) {
|
||||
sink << "entry" << "(" << "name" << i.first << "node";
|
||||
dump(path + i.second);
|
||||
sink << ")";
|
||||
}
|
||||
}
|
||||
|
||||
else if (st.type == tSymlink)
|
||||
sink << "type" << "symlink" << "target" << readLink(path);
|
||||
|
||||
else throw Error("file '%s' has an unsupported type", path);
|
||||
|
||||
sink << ")";
|
||||
};
|
||||
|
||||
sink << narVersionMagic1;
|
||||
dump(path);
|
||||
}
|
||||
|
||||
StorePath InputAccessor::fetchToStore(
|
||||
ref<Store> store,
|
||||
const CanonPath & path,
|
||||
std::string_view name,
|
||||
PathFilter * filter,
|
||||
RepairFlag repair)
|
||||
{
|
||||
// FIXME: add an optimisation for the case where the accessor is
|
||||
// an FSInputAccessor pointing to a store path.
|
||||
|
||||
std::optional<std::string> cacheKey;
|
||||
|
||||
if (!filter && fingerprint) {
|
||||
cacheKey = *fingerprint + "|" + name + "|" + path.abs();
|
||||
if (auto storePathS = fetchers::getCache()->queryFact(*cacheKey)) {
|
||||
if (auto storePath = store->maybeParseStorePath(*storePathS)) {
|
||||
if (store->isValidPath(*storePath)) {
|
||||
debug("store path cache hit for '%s'", showPath(path));
|
||||
return *storePath;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else
|
||||
debug("source path '%s' is uncacheable", showPath(path));
|
||||
|
||||
Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", showPath(path)));
|
||||
|
||||
auto source = sinkToSource([&](Sink & sink) {
|
||||
dumpPath(path, sink, filter ? *filter : defaultPathFilter);
|
||||
});
|
||||
|
||||
auto storePath =
|
||||
settings.readOnlyMode
|
||||
? store->computeStorePathFromDump(*source, name).first
|
||||
: store->addToStoreFromDump(*source, name, FileIngestionMethod::Recursive, htSHA256, repair);
|
||||
|
||||
if (cacheKey)
|
||||
fetchers::getCache()->upsertFact(*cacheKey, store->printStorePath(storePath));
|
||||
|
||||
return storePath;
|
||||
}
|
||||
|
||||
std::optional<InputAccessor::Stat> InputAccessor::maybeLstat(const CanonPath & path)
|
||||
{
|
||||
// FIXME: merge these into one operation.
|
||||
if (!pathExists(path))
|
||||
return {};
|
||||
return lstat(path);
|
||||
}
|
||||
|
||||
void InputAccessor::setPathDisplay(std::string displayPrefix, std::string displaySuffix)
|
||||
{
|
||||
this->displayPrefix = std::move(displayPrefix);
|
||||
this->displaySuffix = std::move(displaySuffix);
|
||||
}
|
||||
|
||||
std::string InputAccessor::showPath(const CanonPath & path)
|
||||
{
|
||||
return displayPrefix + path.abs() + displaySuffix;
|
||||
}
|
||||
|
||||
SourcePath InputAccessor::root()
|
||||
{
|
||||
return {ref(shared_from_this()), CanonPath::root};
|
||||
}
|
||||
|
||||
std::ostream & operator << (std::ostream & str, const SourcePath & path)
|
||||
{
|
||||
str << path.to_string();
|
||||
return str;
|
||||
}
|
||||
|
||||
struct MemoryInputAccessorImpl : MemoryInputAccessor
|
||||
{
|
||||
std::map<CanonPath, std::string> files;
|
||||
|
||||
std::string readFile(const CanonPath & path) override
|
||||
{
|
||||
auto i = files.find(path);
|
||||
if (i == files.end())
|
||||
throw Error("file '%s' does not exist", path);
|
||||
return i->second;
|
||||
}
|
||||
|
||||
bool pathExists(const CanonPath & path) override
|
||||
{
|
||||
auto i = files.find(path);
|
||||
return i != files.end();
|
||||
}
|
||||
|
||||
Stat lstat(const CanonPath & path) override
|
||||
{
|
||||
auto i = files.find(path);
|
||||
if (i != files.end())
|
||||
return Stat { .type = tRegular, .isExecutable = false };
|
||||
throw Error("file '%s' does not exist", path);
|
||||
}
|
||||
|
||||
DirEntries readDirectory(const CanonPath & path) override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::string readLink(const CanonPath & path) override
|
||||
{
|
||||
throw UnimplementedError("MemoryInputAccessor::readLink");
|
||||
}
|
||||
|
||||
SourcePath addFile(CanonPath path, std::string && contents) override
|
||||
{
|
||||
files.emplace(path, std::move(contents));
|
||||
|
||||
return {ref(shared_from_this()), std::move(path)};
|
||||
}
|
||||
};
|
||||
|
||||
ref<MemoryInputAccessor> makeMemoryInputAccessor()
|
||||
{
|
||||
return make_ref<MemoryInputAccessorImpl>();
|
||||
}
|
||||
|
||||
StorePath SourcePath::fetchToStore(
|
||||
ref<Store> store,
|
||||
std::string_view name,
|
||||
PathFilter * filter,
|
||||
RepairFlag repair) const
|
||||
{
|
||||
return accessor->fetchToStore(store, path, name, filter, repair);
|
||||
}
|
||||
|
||||
std::string_view SourcePath::baseName() const
|
||||
{
|
||||
return path.baseName().value_or("source");
|
||||
}
|
||||
|
||||
SourcePath SourcePath::parent() const
|
||||
{
|
||||
auto p = path.parent();
|
||||
assert(p);
|
||||
return {accessor, std::move(*p)};
|
||||
}
|
||||
|
||||
SourcePath SourcePath::resolveSymlinks() const
|
||||
{
|
||||
CanonPath res("/");
|
||||
|
||||
int linksAllowed = 1024;
|
||||
|
||||
for (auto & component : path) {
|
||||
res.push(component);
|
||||
while (true) {
|
||||
if (auto st = accessor->maybeLstat(res)) {
|
||||
if (!linksAllowed--)
|
||||
throw Error("infinite symlink recursion in path '%s'", path);
|
||||
if (st->type != InputAccessor::tSymlink) break;
|
||||
auto target = accessor->readLink(res);
|
||||
if (hasPrefix(target, "/"))
|
||||
res = CanonPath(target);
|
||||
else {
|
||||
res.pop();
|
||||
res.extend(CanonPath(target));
|
||||
}
|
||||
} else
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return {accessor, res};
|
||||
}
|
||||
|
||||
}
|
src/libfetchers/input-accessor.hh (new file, 184 lines)
@@ -0,0 +1,184 @@
#pragma once

#include "ref.hh"
#include "types.hh"
#include "archive.hh"
#include "canon-path.hh"
#include "repair-flag.hh"

namespace nix {

MakeError(RestrictedPathError, Error);

struct SourcePath;
class StorePath;
class Store;

struct InputAccessor : public std::enable_shared_from_this<InputAccessor>
{
    const size_t number;

    std::string displayPrefix, displaySuffix;

    std::optional<std::string> fingerprint;

    InputAccessor();

    virtual ~InputAccessor()
    { }

    virtual std::string readFile(const CanonPath & path) = 0;

    virtual bool pathExists(const CanonPath & path) = 0;

    enum Type { tRegular, tSymlink, tDirectory, tMisc };

    struct Stat
    {
        Type type = tMisc;
        //uint64_t fileSize = 0; // regular files only
        bool isExecutable = false; // regular files only
    };

    virtual Stat lstat(const CanonPath & path) = 0;

    std::optional<Stat> maybeLstat(const CanonPath & path);

    typedef std::optional<Type> DirEntry;

    typedef std::map<std::string, DirEntry> DirEntries;

    virtual DirEntries readDirectory(const CanonPath & path) = 0;

    virtual std::string readLink(const CanonPath & path) = 0;

    virtual void dumpPath(
        const CanonPath & path,
        Sink & sink,
        PathFilter & filter = defaultPathFilter);

    StorePath fetchToStore(
        ref<Store> store,
        const CanonPath & path,
        std::string_view name,
        PathFilter * filter = nullptr,
        RepairFlag repair = NoRepair);

    /* Return a corresponding path in the root filesystem, if
       possible. This is only possible for inputs that are
       materialized in the root filesystem. */
    virtual std::optional<CanonPath> getPhysicalPath(const CanonPath & path)
    { return std::nullopt; }

    bool operator == (const InputAccessor & x) const
    {
        return number == x.number;
    }

    bool operator < (const InputAccessor & x) const
    {
        return number < x.number;
    }

    void setPathDisplay(std::string displayPrefix, std::string displaySuffix = "");

    virtual std::string showPath(const CanonPath & path);

    SourcePath root();

    /* Return the maximum last-modified time of the files in this
       tree, if available. */
    virtual std::optional<time_t> getLastModified()
    {
        return std::nullopt;
    }
};

typedef std::function<RestrictedPathError(const CanonPath & path)> MakeNotAllowedError;

struct SourcePath;

struct MemoryInputAccessor : InputAccessor
{
    virtual SourcePath addFile(CanonPath path, std::string && contents) = 0;
};

ref<MemoryInputAccessor> makeMemoryInputAccessor();

ref<InputAccessor> makeZipInputAccessor(const CanonPath & path);

ref<InputAccessor> makePatchingInputAccessor(
    ref<InputAccessor> next,
    const std::vector<std::string> & patches);

struct SourcePath
{
    ref<InputAccessor> accessor;
    CanonPath path;

    std::string_view baseName() const;

    SourcePath parent() const;

    std::string readFile() const
    { return accessor->readFile(path); }

    bool pathExists() const
    { return accessor->pathExists(path); }

    InputAccessor::Stat lstat() const
    { return accessor->lstat(path); }

    std::optional<InputAccessor::Stat> maybeLstat() const
    { return accessor->maybeLstat(path); }

    InputAccessor::DirEntries readDirectory() const
    { return accessor->readDirectory(path); }

    std::string readLink() const
    { return accessor->readLink(path); }

    void dumpPath(
        Sink & sink,
        PathFilter & filter = defaultPathFilter) const
    { return accessor->dumpPath(path, sink, filter); }

    StorePath fetchToStore(
        ref<Store> store,
        std::string_view name,
        PathFilter * filter = nullptr,
        RepairFlag repair = NoRepair) const;

    std::optional<CanonPath> getPhysicalPath() const
    { return accessor->getPhysicalPath(path); }

    std::string to_string() const
    { return accessor->showPath(path); }

    SourcePath operator + (const CanonPath & x) const
    { return {accessor, path + x}; }

    SourcePath operator + (std::string_view c) const
    { return {accessor, path + c}; }

    bool operator == (const SourcePath & x) const
    {
        return std::tie(accessor, path) == std::tie(x.accessor, x.path);
    }

    bool operator != (const SourcePath & x) const
    {
        return std::tie(accessor, path) != std::tie(x.accessor, x.path);
    }

    bool operator < (const SourcePath & x) const
    {
        return std::tie(accessor, path) < std::tie(x.accessor, x.path);
    }

    SourcePath resolveSymlinks() const;
};

std::ostream & operator << (std::ostream & str, const SourcePath & path);

}
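The InputAccessor / SourcePath pair above is the central abstraction of this branch: fetchers expose their trees through an accessor instead of copying them to the store first. As a quick orientation, here is a minimal usage sketch (not part of this commit). It relies only on the declarations shown in input-accessor.hh; the file name and contents are made up for illustration.

// Illustrative sketch, not part of the diff.
#include "input-accessor.hh"
#include <cassert>

void memoryAccessorExample()
{
    using namespace nix;

    // Build an in-memory tree with a single (made-up) file.
    auto accessor = makeMemoryInputAccessor();
    SourcePath file = accessor->addFile(CanonPath("/hello.txt"), "hello world\n");

    // A SourcePath bundles an accessor with a path inside it, so all
    // filesystem-style operations are routed through the accessor.
    assert(file.pathExists());
    assert(file.readFile() == "hello world\n");
    assert(file.lstat().type == InputAccessor::tRegular);
}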
@@ -8,6 +8,6 @@ libfetchers_SOURCES := $(wildcard $(d)/*.cc)

libfetchers_CXXFLAGS += -I src/libutil -I src/libstore

libfetchers_LDFLAGS += -pthread
libfetchers_LDFLAGS += -pthread -lzip

libfetchers_LIBS = libutil libstore
|
@ -4,7 +4,7 @@
|
|||
#include "tarfile.hh"
|
||||
#include "store-api.hh"
|
||||
#include "url-parts.hh"
|
||||
|
||||
#include "fs-input-accessor.hh"
|
||||
#include "fetch-settings.hh"
|
||||
|
||||
#include <sys/time.h>
|
||||
|
@ -43,7 +43,7 @@ static std::string runHg(const Strings & args, const std::optional<std::string>
|
|||
|
||||
struct MercurialInputScheme : InputScheme
|
||||
{
|
||||
std::optional<Input> inputFromURL(const ParsedURL & url) override
|
||||
std::optional<Input> inputFromURL(const ParsedURL & url) const override
|
||||
{
|
||||
if (url.scheme != "hg+http" &&
|
||||
url.scheme != "hg+https" &&
|
||||
|
@ -69,7 +69,7 @@ struct MercurialInputScheme : InputScheme
|
|||
return inputFromAttrs(attrs);
|
||||
}
|
||||
|
||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) override
|
||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
||||
{
|
||||
if (maybeGetStrAttr(attrs, "type") != "hg") return {};
|
||||
|
||||
|
@ -89,7 +89,7 @@ struct MercurialInputScheme : InputScheme
|
|||
return input;
|
||||
}
|
||||
|
||||
ParsedURL toURL(const Input & input) override
|
||||
ParsedURL toURL(const Input & input) const override
|
||||
{
|
||||
auto url = parseURL(getStrAttr(input.attrs, "url"));
|
||||
url.scheme = "hg+" + url.scheme;
|
||||
|
@ -98,17 +98,10 @@ struct MercurialInputScheme : InputScheme
|
|||
return url;
|
||||
}
|
||||
|
||||
bool hasAllInfo(const Input & input) override
|
||||
{
|
||||
// FIXME: ugly, need to distinguish between dirty and clean
|
||||
// default trees.
|
||||
return input.getRef() == "default" || maybeGetIntAttr(input.attrs, "revCount");
|
||||
}
|
||||
|
||||
Input applyOverrides(
|
||||
const Input & input,
|
||||
std::optional<std::string> ref,
|
||||
std::optional<Hash> rev) override
|
||||
std::optional<Hash> rev) const override
|
||||
{
|
||||
auto res(input);
|
||||
if (rev) res.attrs.insert_or_assign("rev", rev->gitRev());
|
||||
|
@ -116,26 +109,29 @@ struct MercurialInputScheme : InputScheme
|
|||
return res;
|
||||
}
|
||||
|
||||
std::optional<Path> getSourcePath(const Input & input) override
|
||||
void putFile(
|
||||
const Input & input,
|
||||
const CanonPath & path,
|
||||
std::string_view contents,
|
||||
std::optional<std::string> commitMsg) const
|
||||
{
|
||||
auto url = parseURL(getStrAttr(input.attrs, "url"));
|
||||
if (url.scheme == "file" && !input.getRef() && !input.getRev())
|
||||
return url.path;
|
||||
return {};
|
||||
}
|
||||
auto [isLocal, repoPath] = getActualUrl(input);
|
||||
if (!isLocal)
|
||||
throw Error("cannot commit '%s' to Mercurial repository '%s' because it's not a working tree", path, input.to_string());
|
||||
|
||||
void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
|
||||
{
|
||||
auto sourcePath = getSourcePath(input);
|
||||
assert(sourcePath);
|
||||
auto absPath = CanonPath(repoPath) + path;
|
||||
|
||||
// FIXME: make sure that absPath is not a symlink that escapes
|
||||
// the repo.
|
||||
writeFile(absPath.abs(), contents);
|
||||
|
||||
// FIXME: shut up if file is already tracked.
|
||||
runHg(
|
||||
{ "add", *sourcePath + "/" + std::string(file) });
|
||||
{ "add", absPath.abs() });
|
||||
|
||||
if (commitMsg)
|
||||
runHg(
|
||||
{ "commit", *sourcePath + "/" + std::string(file), "-m", *commitMsg });
|
||||
{ "commit", absPath.abs(), "-m", *commitMsg });
|
||||
}
|
||||
|
||||
std::pair<bool, std::string> getActualUrl(const Input & input) const
|
||||
|
@ -145,9 +141,9 @@ struct MercurialInputScheme : InputScheme
|
|||
return {isLocal, isLocal ? url.path : url.base};
|
||||
}
|
||||
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
StorePath fetchToStore(ref<Store> store, Input & input) const
|
||||
{
|
||||
Input input(_input);
|
||||
auto origRev = input.getRev();
|
||||
|
||||
auto name = input.getName();
|
||||
|
||||
|
@ -197,7 +193,7 @@ struct MercurialInputScheme : InputScheme
|
|||
|
||||
auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
|
||||
|
||||
return {std::move(storePath), input};
|
||||
return storePath;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -221,13 +217,12 @@ struct MercurialInputScheme : InputScheme
|
|||
});
|
||||
};
|
||||
|
||||
auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
|
||||
-> std::pair<StorePath, Input>
|
||||
auto makeResult = [&](const Attrs & infoAttrs, const StorePath & storePath) -> StorePath
|
||||
{
|
||||
assert(input.getRev());
|
||||
assert(!_input.getRev() || _input.getRev() == input.getRev());
|
||||
assert(!origRev || origRev == input.getRev());
|
||||
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
|
||||
return {std::move(storePath), input};
|
||||
return storePath;
|
||||
};
|
||||
|
||||
if (input.getRev()) {
|
||||
|
@ -307,7 +302,7 @@ struct MercurialInputScheme : InputScheme
|
|||
{"revCount", (uint64_t) revCount},
|
||||
});
|
||||
|
||||
if (!_input.getRev())
|
||||
if (!origRev)
|
||||
getCache()->add(
|
||||
store,
|
||||
unlockedAttrs,
|
||||
|
@ -324,6 +319,20 @@ struct MercurialInputScheme : InputScheme
|
|||
|
||||
return makeResult(infoAttrs, std::move(storePath));
|
||||
}
|
||||
|
||||
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
|
||||
{
|
||||
Input input(_input);
|
||||
|
||||
auto storePath = fetchToStore(store, input);
|
||||
|
||||
return {makeStorePathAccessor(store, storePath), input};
|
||||
}
|
||||
|
||||
bool isLocked(const Input & input) const override
|
||||
{
|
||||
return (bool) input.getRev();
|
||||
}
|
||||
};
|
||||
|
||||
static auto rMercurialInputScheme = OnStartup([] { registerInputScheme(std::make_unique<MercurialInputScheme>()); });
|
||||
|
|
src/libfetchers/patching-input-accessor.cc (new file, 116 lines)
@@ -0,0 +1,116 @@
#include "input-accessor.hh"

namespace nix {

// TODO: handle file creation / deletion.
struct PatchingInputAccessor : InputAccessor
{
    ref<InputAccessor> next;

    std::map<CanonPath, std::vector<std::string>> patchesPerFile;

    PatchingInputAccessor(
        ref<InputAccessor> next,
        const std::vector<std::string> & patches)
        : next(next)
    {
        /* Extract the patches for each file. */
        for (auto & patch : patches) {
            std::string_view p = patch;
            std::string_view start;
            std::string_view fileName;

            auto flush = [&]()
            {
                if (start.empty()) return;
                auto contents = start.substr(0, p.data() - start.data());
                start = "";
                auto slash = fileName.find('/');
                if (slash == fileName.npos) return;
                fileName = fileName.substr(slash);
                debug("found patch for '%s'", fileName);
                patchesPerFile.emplace(fileName, std::vector<std::string>())
                    .first->second.push_back(std::string(contents));
            };

            while (!p.empty()) {
                auto [line, rest] = getLine(p);

                if (hasPrefix(line, "--- ")) {
                    flush();
                    start = p;
                    fileName = line.substr(4);
                }

                if (!start.empty()) {
                    if (!(hasPrefix(line, "+++ ")
                            || hasPrefix(line, "@@")
                            || hasPrefix(line, "+")
                            || hasPrefix(line, "-")
                            || hasPrefix(line, " ")
                            || line.empty()))
                    {
                        flush();
                    }
                }

                p = rest;
            }

            flush();
        }
    }

    std::string readFile(const CanonPath & path) override
    {
        auto contents = next->readFile(path);

        auto i = patchesPerFile.find(path);
        if (i != patchesPerFile.end()) {
            for (auto & patch : i->second) {
                auto tempDir = createTempDir();
                AutoDelete del(tempDir);
                auto sourceFile = tempDir + "/source";
                auto rejFile = tempDir + "/source.rej";
                writeFile(sourceFile, contents);
                try {
                    contents = runProgram("patch", true, {"--quiet", sourceFile, "--output=-", "-r", rejFile}, patch);
                } catch (ExecError & e) {
                    del.cancel();
                    throw;
                }
            }
        }

        return contents;
    }

    bool pathExists(const CanonPath & path) override
    {
        return next->pathExists(path);
    }

    Stat lstat(const CanonPath & path) override
    {
        return next->lstat(path);
    }

    DirEntries readDirectory(const CanonPath & path) override
    {
        return next->readDirectory(path);
    }

    std::string readLink(const CanonPath & path) override
    {
        return next->readLink(path);
    }
};

ref<InputAccessor> makePatchingInputAccessor(
    ref<InputAccessor> next,
    const std::vector<std::string> & patches)
{
    return make_ref<PatchingInputAccessor>(next, std::move(patches));
}

}
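For orientation, a minimal sketch of how the patching accessor might be used (not part of this commit): it wraps another accessor and rewrites file reads through the external 'patch' program. Only makePatchingInputAccessor() and the accessor interface above come from the diff; the file name and the unified diff text are made up, and running the sketch assumes a 'patch' binary on PATH.

// Illustrative sketch, not part of the diff.
#include "input-accessor.hh"
#include <cassert>
#include <string>
#include <vector>

void patchingAccessorExample()
{
    using namespace nix;

    // Base tree with one made-up file.
    auto base = makeMemoryInputAccessor();
    base->addFile(CanonPath("/greeting.txt"), "hello\n");

    // A made-up unified diff that rewrites that file.
    std::vector<std::string> patches = {
        "--- a/greeting.txt\n"
        "+++ b/greeting.txt\n"
        "@@ -1 +1 @@\n"
        "-hello\n"
        "+goodbye\n"
    };

    auto patched = makePatchingInputAccessor(base, patches);

    // readFile() applies the patch to the original contents on the fly.
    assert(patched->readFile(CanonPath("/greeting.txt")) == "goodbye\n");
}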
@ -1,12 +1,13 @@
|
|||
#include "fetchers.hh"
|
||||
#include "store-api.hh"
|
||||
#include "archive.hh"
|
||||
#include "fs-input-accessor.hh"
|
||||
|
||||
namespace nix::fetchers {
|
||||
|
||||
struct PathInputScheme : InputScheme
|
||||
{
|
||||
std::optional<Input> inputFromURL(const ParsedURL & url) override
|
||||
std::optional<Input> inputFromURL(const ParsedURL & url) const override
|
||||
{
|
||||
if (url.scheme != "path") return {};
|
||||
|
||||
|
@ -26,25 +27,31 @@ struct PathInputScheme : InputScheme
|
|||
else
|
||||
throw Error("path URL '%s' has invalid parameter '%s'", url.to_string(), name);
|
||||
}
|
||||
else if (name == "lock")
|
||||
input.attrs.emplace(name, Explicit<bool> { value == "1" });
|
||||
else
|
||||
throw Error("path URL '%s' has unsupported parameter '%s'", url.to_string(), name);
|
||||
|
||||
return input;
|
||||
}
|
||||
|
||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) override
|
||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
||||
{
|
||||
if (maybeGetStrAttr(attrs, "type") != "path") return {};
|
||||
|
||||
getStrAttr(attrs, "path");
|
||||
maybeGetBoolAttr(attrs, "lock");
|
||||
|
||||
for (auto & [name, value] : attrs)
|
||||
/* Allow the user to pass in "fake" tree info
|
||||
attributes. This is useful for making a pinned tree
|
||||
work the same as the repository from which is exported
|
||||
(e.g. path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...). */
|
||||
if (name == "type" || name == "rev" || name == "revCount" || name == "lastModified" || name == "narHash" || name == "path")
|
||||
// checked in Input::fromAttrs
|
||||
(e.g. path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...).
|
||||
FIXME: remove this hack once we have a prepopulated
|
||||
flake input cache mechanism.
|
||||
*/
|
||||
if (name == "type" || name == "rev" || name == "revCount" || name == "lastModified" || name == "narHash" || name == "path" || name == "lock")
|
||||
// checked elsewhere
|
||||
;
|
||||
else
|
||||
throw Error("unsupported path input attribute '%s'", name);
|
||||
|
@ -54,7 +61,12 @@ struct PathInputScheme : InputScheme
|
|||
return input;
|
||||
}
|
||||
|
||||
ParsedURL toURL(const Input & input) override
|
||||
bool getLockAttr(const Input & input) const
|
||||
{
|
||||
return maybeGetBoolAttr(input.attrs, "lock").value_or(false);
|
||||
}
|
||||
|
||||
ParsedURL toURL(const Input & input) const override
|
||||
{
|
||||
auto query = attrsToQuery(input.attrs);
|
||||
query.erase("path");
|
||||
|
@ -66,65 +78,87 @@ struct PathInputScheme : InputScheme
|
|||
};
|
||||
}
|
||||
|
||||
bool hasAllInfo(const Input & input) override
|
||||
std::optional<std::string> isRelative(const Input & input) const override
|
||||
{
|
||||
return true;
|
||||
auto path = getStrAttr(input.attrs, "path");
|
||||
if (hasPrefix(path, "/"))
|
||||
return std::nullopt;
|
||||
else
|
||||
return path;
|
||||
}
|
||||
|
||||
std::optional<Path> getSourcePath(const Input & input) override
|
||||
bool isLocked(const Input & input) const override
|
||||
{
|
||||
return getStrAttr(input.attrs, "path");
|
||||
return (bool) input.getNarHash();
|
||||
}
|
||||
|
||||
void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
|
||||
void putFile(
|
||||
const Input & input,
|
||||
const CanonPath & path,
|
||||
std::string_view contents,
|
||||
std::optional<std::string> commitMsg) const
|
||||
{
|
||||
// nothing to do
|
||||
auto absPath = CanonPath(getAbsPath(input)) + path;
|
||||
|
||||
// FIXME: make sure that absPath is not a symlink that escapes
|
||||
// the repo.
|
||||
writeFile(absPath.abs(), contents);
|
||||
}
|
||||
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
CanonPath getAbsPath(const Input & input) const
|
||||
{
|
||||
Input input(_input);
|
||||
std::string absPath;
|
||||
auto path = getStrAttr(input.attrs, "path");
|
||||
|
||||
if (path[0] != '/') {
|
||||
if (!input.parent)
|
||||
throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string());
|
||||
if (path[0] == '/')
|
||||
return CanonPath(path);
|
||||
|
||||
auto parent = canonPath(*input.parent);
|
||||
|
||||
// the path isn't relative, prefix it
|
||||
absPath = nix::absPath(path, parent);
|
||||
|
||||
// for security, ensure that if the parent is a store path, it's inside it
|
||||
if (store->isInStore(parent)) {
|
||||
auto storePath = store->printStorePath(store->toStorePath(parent).first);
|
||||
if (!isDirOrInDir(absPath, storePath))
|
||||
throw BadStorePath("relative path '%s' points outside of its parent's store path '%s'", path, storePath);
|
||||
}
|
||||
} else
|
||||
absPath = path;
|
||||
|
||||
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying '%s'", absPath));
|
||||
|
||||
// FIXME: check whether access to 'path' is allowed.
|
||||
auto storePath = store->maybeParseStorePath(absPath);
|
||||
|
||||
if (storePath)
|
||||
store->addTempRoot(*storePath);
|
||||
|
||||
time_t mtime = 0;
|
||||
if (!storePath || storePath->name() != "source" || !store->isValidPath(*storePath)) {
|
||||
// FIXME: try to substitute storePath.
|
||||
auto src = sinkToSource([&](Sink & sink) {
|
||||
mtime = dumpPathAndGetMtime(absPath, sink, defaultPathFilter);
|
||||
});
|
||||
storePath = store->addToStoreFromDump(*src, "source");
|
||||
}
|
||||
input.attrs.insert_or_assign("lastModified", uint64_t(mtime));
|
||||
|
||||
return {std::move(*storePath), input};
|
||||
throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string());
|
||||
}
|
||||
|
||||
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const override
|
||||
{
|
||||
auto absPath = getAbsPath(input);
|
||||
auto input2(input);
|
||||
input2.attrs.emplace("path", (std::string) absPath.abs());
|
||||
|
||||
if (getLockAttr(input2)) {
|
||||
|
||||
auto storePath = store->maybeParseStorePath(absPath.abs());
|
||||
|
||||
if (!storePath || storePath->name() != input.getName() || !store->isValidPath(*storePath)) {
|
||||
Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", absPath));
|
||||
storePath = store->addToStore(input.getName(), absPath.abs());
|
||||
auto narHash = store->queryPathInfo(*storePath)->narHash;
|
||||
input2.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
|
||||
} else
|
||||
input2.attrs.erase("narHash");
|
||||
|
||||
input2.attrs.erase("lastModified");
|
||||
|
||||
auto makeNotAllowedError = [absPath](const CanonPath & path) -> RestrictedPathError
|
||||
{
|
||||
return RestrictedPathError("path '%s' does not exist'", absPath + path);
|
||||
};
|
||||
|
||||
return {makeStorePathAccessor(store, *storePath, std::move(makeNotAllowedError)), std::move(input2)};
|
||||
|
||||
} else {
|
||||
return {makeFSInputAccessor(absPath), std::move(input2)};
|
||||
}
|
||||
}
|
||||
|
||||
std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const override
|
||||
{
|
||||
/* If this path is in the Nix store, we can consider it
|
||||
locked, so just use the path as its fingerprint. Maybe we
|
||||
should restrict this to CA paths but that's not
|
||||
super-important. */
|
||||
auto path = getAbsPath(input);
|
||||
if (store->isInStore(path.abs()))
|
||||
return path.abs();
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
static auto rPathInputScheme = OnStartup([] { registerInputScheme(std::make_unique<PathInputScheme>()); });
|
||||
|
|
|
@@ -1,5 +1,5 @@
#include "registry.hh"
#include "fetchers.hh"
#include "tarball.hh"
#include "util.hh"
#include "globals.hh"
#include "store-api.hh"
|
@ -1,3 +1,4 @@
|
|||
#include "tarball.hh"
|
||||
#include "fetchers.hh"
|
||||
#include "cache.hh"
|
||||
#include "filetransfer.hh"
|
||||
|
@ -7,6 +8,7 @@
|
|||
#include "tarfile.hh"
|
||||
#include "types.hh"
|
||||
#include "split.hh"
|
||||
#include "fs-input-accessor.hh"
|
||||
|
||||
namespace nix::fetchers {
|
||||
|
||||
|
@ -110,7 +112,7 @@ DownloadFileResult downloadFile(
|
|||
};
|
||||
}
|
||||
|
||||
std::pair<Tree, time_t> downloadTarball(
|
||||
std::pair<StorePath, time_t> downloadTarball(
|
||||
ref<Store> store,
|
||||
const std::string & url,
|
||||
const std::string & name,
|
||||
|
@ -127,7 +129,7 @@ std::pair<Tree, time_t> downloadTarball(
|
|||
|
||||
if (cached && !cached->expired)
|
||||
return {
|
||||
Tree { .actualPath = store->toRealPath(cached->storePath), .storePath = std::move(cached->storePath) },
|
||||
std::move(cached->storePath),
|
||||
getIntAttr(cached->infoAttrs, "lastModified")
|
||||
};
|
||||
|
||||
|
@ -164,7 +166,7 @@ std::pair<Tree, time_t> downloadTarball(
|
|||
locked);
|
||||
|
||||
return {
|
||||
Tree { .actualPath = store->toRealPath(*unpackedStorePath), .storePath = std::move(*unpackedStorePath) },
|
||||
std::move(*unpackedStorePath),
|
||||
lastModified,
|
||||
};
|
||||
}
|
||||
|
@ -185,7 +187,7 @@ struct CurlInputScheme : InputScheme
|
|||
|
||||
virtual bool isValidURL(const ParsedURL & url) const = 0;
|
||||
|
||||
std::optional<Input> inputFromURL(const ParsedURL & url) override
|
||||
std::optional<Input> inputFromURL(const ParsedURL & url) const override
|
||||
{
|
||||
if (!isValidURL(url))
|
||||
return std::nullopt;
|
||||
|
@ -203,7 +205,7 @@ struct CurlInputScheme : InputScheme
|
|||
return input;
|
||||
}
|
||||
|
||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) override
|
||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
||||
{
|
||||
auto type = maybeGetStrAttr(attrs, "type");
|
||||
if (type != inputType()) return {};
|
||||
|
@ -220,20 +222,20 @@ struct CurlInputScheme : InputScheme
|
|||
return input;
|
||||
}
|
||||
|
||||
ParsedURL toURL(const Input & input) override
|
||||
ParsedURL toURL(const Input & input) const override
|
||||
{
|
||||
auto url = parseURL(getStrAttr(input.attrs, "url"));
|
||||
// NAR hashes are preferred over file hashes since tar/zip files // don't have a canonical representation.
|
||||
// NAR hashes are preferred over file hashes since tar/zip
|
||||
// files don't have a canonical representation.
|
||||
if (auto narHash = input.getNarHash())
|
||||
url.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
|
||||
return url;
|
||||
}
|
||||
|
||||
bool hasAllInfo(const Input & input) override
|
||||
bool isLocked(const Input & input) const override
|
||||
{
|
||||
return true;
|
||||
return (bool) input.getNarHash();
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
struct FileInputScheme : CurlInputScheme
|
||||
|
@ -249,10 +251,17 @@ struct FileInputScheme : CurlInputScheme
|
|||
: !hasTarballExtension(url.path));
|
||||
}
|
||||
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
|
||||
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
|
||||
{
|
||||
auto input(_input);
|
||||
|
||||
auto file = downloadFile(store, getStrAttr(input.attrs, "url"), input.getName(), false);
|
||||
return {std::move(file.storePath), input};
|
||||
|
||||
// FIXME: remove?
|
||||
auto narHash = store->queryPathInfo(file.storePath)->narHash;
|
||||
input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
|
||||
|
||||
return {makeStorePathAccessor(store, file.storePath), input};
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -270,10 +279,17 @@ struct TarballInputScheme : CurlInputScheme
|
|||
: hasTarballExtension(url.path));
|
||||
}
|
||||
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
|
||||
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
|
||||
{
|
||||
auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), input.getName(), false).first;
|
||||
return {std::move(tree.storePath), input};
|
||||
auto input(_input);
|
||||
|
||||
auto storePath = downloadTarball(store, getStrAttr(input.attrs, "url"), input.getName(), false).first;
|
||||
|
||||
// FIXME: remove?
|
||||
auto narHash = store->queryPathInfo(storePath)->narHash;
|
||||
input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
|
||||
|
||||
return {makeStorePathAccessor(store, storePath), input};
|
||||
}
|
||||
};
|
||||
|
||||
|
|
src/libfetchers/tarball.hh (new file, 29 lines)
@@ -0,0 +1,29 @@
#pragma once

#include "types.hh"
#include "path.hh"

namespace nix::fetchers {

struct DownloadFileResult
{
    StorePath storePath;
    std::string etag;
    std::string effectiveUrl;
};

DownloadFileResult downloadFile(
    ref<Store> store,
    const std::string & url,
    const std::string & name,
    bool locked,
    const Headers & headers = {});

std::pair<StorePath, time_t> downloadTarball(
    ref<Store> store,
    const std::string & url,
    const std::string & name,
    bool locked,
    const Headers & headers = {});

}
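A small usage sketch for the new tarball interface (not part of this commit): downloadTarball() now returns a bare StorePath plus the archive's last-modified time instead of a Tree. The URL below is a placeholder, and the extra includes are assumptions about which headers provide Store, ref and Headers.

// Illustrative sketch, not part of the diff.
#include "tarball.hh"
#include "fetchers.hh"
#include "store-api.hh"

nix::StorePath downloadTarballExample(nix::ref<nix::Store> store)
{
    using namespace nix::fetchers;

    // Fetch and unpack a (placeholder) tarball into the store.
    auto [storePath, lastModified] = downloadTarball(
        store,
        "https://example.org/source.tar.gz", // placeholder URL
        "source",
        /* locked */ false);

    // lastModified is the newest timestamp found in the archive; callers
    // can record it, e.g. as a lastModified input attribute.
    return storePath;
}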
src/libfetchers/zip-input-accessor.cc (new file, 196 lines)
@ -0,0 +1,196 @@
|
|||
#include "input-accessor.hh"
|
||||
|
||||
#include <zip.h>
|
||||
#include <arpa/inet.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct cmp_str
|
||||
{
|
||||
bool operator ()(const char * a, const char * b) const
|
||||
{
|
||||
return std::strcmp(a, b) < 0;
|
||||
}
|
||||
};
|
||||
|
||||
struct ZipMember
|
||||
{
|
||||
struct zip_file * p = nullptr;
|
||||
ZipMember(struct zip_file * p) : p(p) { }
|
||||
~ZipMember() { if (p) zip_fclose(p); }
|
||||
operator zip_file *() { return p; }
|
||||
};
|
||||
|
||||
struct ZipInputAccessor : InputAccessor
|
||||
{
|
||||
CanonPath zipPath;
|
||||
struct zip * zipFile = nullptr;
|
||||
|
||||
typedef std::map<const char *, struct zip_stat, cmp_str> Members;
|
||||
Members members;
|
||||
|
||||
time_t lastModified = 0;
|
||||
|
||||
ZipInputAccessor(const CanonPath & _zipPath)
|
||||
: zipPath(_zipPath)
|
||||
{
|
||||
int error;
|
||||
zipFile = zip_open(zipPath.c_str(), ZIP_RDONLY, &error);
|
||||
if (!zipFile) {
|
||||
char errorMsg[1024];
|
||||
zip_error_to_str(errorMsg, sizeof errorMsg, error, errno);
|
||||
throw Error("couldn't open '%s': %s", zipPath, errorMsg);
|
||||
}
|
||||
|
||||
/* Read the index of the zip file and put it in a map. This
|
||||
is unfortunately necessary because libzip's lookup
|
||||
functions are O(n) time. */
|
||||
struct zip_stat sb;
|
||||
zip_uint64_t nrEntries = zip_get_num_entries(zipFile, 0);
|
||||
for (zip_uint64_t n = 0; n < nrEntries; ++n) {
|
||||
if (zip_stat_index(zipFile, n, 0, &sb))
|
||||
throw Error("couldn't stat archive member #%d in '%s': %s", n, zipPath, zip_strerror(zipFile));
|
||||
|
||||
/* Get the timestamp of this file. */
|
||||
#if 0
|
||||
if (sb.valid & ZIP_STAT_MTIME)
|
||||
lastModified = std::max(lastModified, sb.mtime);
|
||||
#endif
|
||||
auto nExtra = zip_file_extra_fields_count(zipFile, n, ZIP_FL_CENTRAL);
|
||||
for (auto i = 0; i < nExtra; ++i) {
|
||||
zip_uint16_t id, len;
|
||||
auto extra = zip_file_extra_field_get(zipFile, i, 0, &id, &len, ZIP_FL_CENTRAL);
|
||||
if (id == 0x5455 && len >= 5)
|
||||
lastModified = std::max(lastModified, (time_t) readLittleEndian<uint32_t>((unsigned char *) extra + 1));
|
||||
}
|
||||
|
||||
auto slash = strchr(sb.name, '/');
|
||||
if (!slash) continue;
|
||||
members.emplace(slash, sb);
|
||||
}
|
||||
}
|
||||
|
||||
~ZipInputAccessor()
|
||||
{
|
||||
if (zipFile) zip_close(zipFile);
|
||||
}
|
||||
|
||||
std::string _readFile(const CanonPath & path)
|
||||
{
|
||||
auto i = members.find(((std::string) path.abs()).c_str());
|
||||
if (i == members.end())
|
||||
throw Error("file '%s' does not exist", showPath(path));
|
||||
|
||||
ZipMember member(zip_fopen_index(zipFile, i->second.index, 0));
|
||||
if (!member)
|
||||
throw Error("couldn't open archive member '%s': %s",
|
||||
showPath(path), zip_strerror(zipFile));
|
||||
|
||||
std::string buf(i->second.size, 0);
|
||||
if (zip_fread(member, buf.data(), i->second.size) != (zip_int64_t) i->second.size)
|
||||
throw Error("couldn't read archive member '%s' in '%s'", path, zipPath);
|
||||
|
||||
return buf;
|
||||
}
|
||||
|
||||
std::string readFile(const CanonPath & path) override
|
||||
{
|
||||
if (lstat(path).type != tRegular)
|
||||
throw Error("file '%s' is not a regular file", path);
|
||||
|
||||
return _readFile(path);
|
||||
}
|
||||
|
||||
bool pathExists(const CanonPath & path) override
|
||||
{
|
||||
return
|
||||
members.find(path.c_str()) != members.end()
|
||||
|| members.find(((std::string) path.abs() + "/").c_str()) != members.end();
|
||||
}
|
||||
|
||||
Stat lstat(const CanonPath & path) override
|
||||
{
|
||||
if (path.isRoot())
|
||||
return Stat { .type = tDirectory };
|
||||
|
||||
Type type = tRegular;
|
||||
bool isExecutable = false;
|
||||
|
||||
auto i = members.find(path.c_str());
|
||||
if (i == members.end()) {
|
||||
i = members.find(((std::string) path.abs() + "/").c_str());
|
||||
type = tDirectory;
|
||||
}
|
||||
if (i == members.end())
|
||||
throw Error("file '%s' does not exist", showPath(path));
|
||||
|
||||
// FIXME: cache this
|
||||
zip_uint8_t opsys;
|
||||
zip_uint32_t attributes;
|
||||
if (zip_file_get_external_attributes(zipFile, i->second.index, ZIP_FL_UNCHANGED, &opsys, &attributes) == -1)
|
||||
throw Error("couldn't get external attributes of '%s': %s",
|
||||
showPath(path), zip_strerror(zipFile));
|
||||
|
||||
switch (opsys) {
|
||||
case ZIP_OPSYS_UNIX:
|
||||
auto t = (attributes >> 16) & 0770000;
|
||||
switch (t) {
|
||||
case 0040000: type = tDirectory; break;
|
||||
case 0100000:
|
||||
type = tRegular;
|
||||
isExecutable = (attributes >> 16) & 0000100;
|
||||
break;
|
||||
case 0120000: type = tSymlink; break;
|
||||
default:
|
||||
throw Error("file '%s' has unsupported type %o", showPath(path), t);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
return Stat { .type = type, .isExecutable = isExecutable };
|
||||
}
|
||||
|
||||
DirEntries readDirectory(const CanonPath & _path) override
|
||||
{
|
||||
std::string path(_path.abs());
|
||||
if (path != "/") path += "/";
|
||||
|
||||
auto i = members.find(path.c_str());
|
||||
if (i == members.end())
|
||||
throw Error("directory '%s' does not exist", showPath(_path));
|
||||
|
||||
++i;
|
||||
|
||||
DirEntries entries;
|
||||
|
||||
for (; i != members.end() && strncmp(i->first, path.c_str(), path.size()) == 0; ++i) {
|
||||
auto start = i->first + path.size();
|
||||
auto slash = strchr(start, '/');
|
||||
if (slash && strcmp(slash, "/") != 0) continue;
|
||||
auto name = slash ? std::string(start, slash - start) : std::string(start);
|
||||
entries.emplace(name, std::nullopt);
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
std::string readLink(const CanonPath & path) override
|
||||
{
|
||||
if (lstat(path).type != tSymlink)
|
||||
throw Error("file '%s' is not a symlink", showPath(path));
|
||||
|
||||
return _readFile(path);
|
||||
}
|
||||
|
||||
std::optional<time_t> getLastModified() override
|
||||
{
|
||||
return lastModified;
|
||||
}
|
||||
};
|
||||
|
||||
ref<InputAccessor> makeZipInputAccessor(const CanonPath & path)
|
||||
{
|
||||
return make_ref<ZipInputAccessor>(path);
|
||||
}
|
||||
|
||||
}
|
|
@ -132,7 +132,7 @@ public:
|
|||
log(*state, lvl, fs.s);
|
||||
}
|
||||
|
||||
void logEI(const ErrorInfo &ei) override
|
||||
void logEI(const ErrorInfo & ei) override
|
||||
{
|
||||
auto state(state_.lock());
|
||||
|
||||
|
|
|
@ -354,7 +354,7 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
|
|||
try {
|
||||
getFile(info->url, *decompressor);
|
||||
} catch (NoSuchBinaryCacheFile & e) {
|
||||
throw SubstituteGone(e.info());
|
||||
throw SubstituteGone(std::move(e.info()));
|
||||
}
|
||||
|
||||
decompressor->finish();
|
||||
|
|
|
@ -135,7 +135,7 @@ void DerivationGoal::killChild()
|
|||
void DerivationGoal::timedOut(Error && ex)
|
||||
{
|
||||
killChild();
|
||||
done(BuildResult::TimedOut, {}, ex);
|
||||
done(BuildResult::TimedOut, {}, std::move(ex));
|
||||
}
|
||||
|
||||
|
||||
|
@ -951,7 +951,7 @@ void DerivationGoal::buildDone()
|
|||
BuildResult::PermanentFailure;
|
||||
}
|
||||
|
||||
done(st, {}, e);
|
||||
done(st, {}, std::move(e));
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -1402,7 +1402,7 @@ void DerivationGoal::done(
|
|||
fs << worker.store.printStorePath(drvPath) << "\t" << buildResult.toString() << std::endl;
|
||||
}
|
||||
|
||||
amDone(buildResult.success() ? ecSuccess : ecFailed, ex);
|
||||
amDone(buildResult.success() ? ecSuccess : ecFailed, std::move(ex));
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
|
|||
if (ex)
|
||||
logError(i->ex->info());
|
||||
else
|
||||
ex = i->ex;
|
||||
ex = std::move(i->ex);
|
||||
}
|
||||
if (i->exitCode != Goal::ecSuccess) {
|
||||
if (auto i2 = dynamic_cast<DerivationGoal *>(i.get())) failed.insert(i2->drvPath);
|
||||
|
@ -40,7 +40,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
|
|||
|
||||
if (failed.size() == 1 && ex) {
|
||||
ex->status = worker.exitStatus();
|
||||
throw *ex;
|
||||
throw std::move(*ex);
|
||||
} else if (!failed.empty()) {
|
||||
if (ex) logError(ex->info());
|
||||
throw Error(worker.exitStatus(), "build of %s failed", showPaths(failed));
|
||||
|
@ -109,7 +109,7 @@ void Store::ensurePath(const StorePath & path)
|
|||
if (goal->exitCode != Goal::ecSuccess) {
|
||||
if (goal->ex) {
|
||||
goal->ex->status = worker.exitStatus();
|
||||
throw *goal->ex;
|
||||
throw std::move(*goal->ex);
|
||||
} else
|
||||
throw Error(worker.exitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path));
|
||||
}
|
||||
|
|
|
@ -193,7 +193,7 @@ void LocalDerivationGoal::tryLocalBuild() {
|
|||
outputLocks.unlock();
|
||||
buildUser.reset();
|
||||
worker.permanentFailure = true;
|
||||
done(BuildResult::InputRejected, {}, e);
|
||||
done(BuildResult::InputRejected, {}, std::move(e));
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
|
@ -448,7 +448,7 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs,
|
|||
|
||||
|
||||
// FIXME: remove
|
||||
bool isDerivation(const std::string & fileName)
|
||||
bool isDerivation(std::string_view fileName)
|
||||
{
|
||||
return hasSuffix(fileName, drvExtension);
|
||||
}
|
||||
|
|
|
@ -224,7 +224,7 @@ StorePath writeDerivation(Store & store,
|
|||
Derivation parseDerivation(const Store & store, std::string && s, std::string_view name);
|
||||
|
||||
// FIXME: remove
|
||||
bool isDerivation(const std::string & fileName);
|
||||
bool isDerivation(std::string_view fileName);
|
||||
|
||||
/* Calculate the name that will be used for the store path for this
|
||||
output.
|
||||
|
|
|
@ -33,14 +33,6 @@ FileTransferSettings fileTransferSettings;
|
|||
|
||||
static GlobalConfig::Register rFileTransferSettings(&fileTransferSettings);
|
||||
|
||||
std::string resolveUri(std::string_view uri)
|
||||
{
|
||||
if (uri.compare(0, 8, "channel:") == 0)
|
||||
return "https://nixos.org/channels/" + std::string(uri.substr(8)) + "/nixexprs.tar.xz";
|
||||
else
|
||||
return std::string(uri);
|
||||
}
|
||||
|
||||
struct curlFileTransfer : public FileTransfer
|
||||
{
|
||||
CURLM * curlm = 0;
|
||||
|
@ -142,9 +134,9 @@ struct curlFileTransfer : public FileTransfer
|
|||
}
|
||||
|
||||
template<class T>
|
||||
void fail(const T & e)
|
||||
void fail(T && e)
|
||||
{
|
||||
failEx(std::make_exception_ptr(e));
|
||||
failEx(std::make_exception_ptr(std::move(e)));
|
||||
}
|
||||
|
||||
LambdaSink finalSink;
|
||||
|
@ -472,7 +464,7 @@ struct curlFileTransfer : public FileTransfer
|
|||
fileTransfer.enqueueItem(shared_from_this());
|
||||
}
|
||||
else
|
||||
fail(exc);
|
||||
fail(std::move(exc));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -873,14 +865,4 @@ FileTransferError::FileTransferError(FileTransfer::Error error, std::optional<st
|
|||
err.msg = hf;
|
||||
}
|
||||
|
||||
bool isUri(std::string_view s)
|
||||
{
|
||||
if (s.compare(0, 8, "channel:") == 0) return true;
|
||||
size_t pos = s.find("://");
|
||||
if (pos == std::string::npos) return false;
|
||||
std::string scheme(s, 0, pos);
|
||||
return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" || scheme == "s3" || scheme == "ssh";
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -125,9 +125,4 @@ public:
|
|||
FileTransferError(FileTransfer::Error error, std::optional<std::string> response, const Args & ... args);
|
||||
};
|
||||
|
||||
bool isUri(std::string_view s);
|
||||
|
||||
/* Resolve deprecated 'channel:<foo>' URLs. */
|
||||
std::string resolveUri(std::string_view uri);
|
||||
|
||||
}
|
||||
|
|
|
@ -447,7 +447,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
|
|||
} catch (Error & e) {
|
||||
// Ugly backwards compatibility hack.
|
||||
if (e.msg().find("is not valid") != std::string::npos)
|
||||
throw InvalidPath(e.info());
|
||||
throw InvalidPath(std::move(e.info()));
|
||||
throw;
|
||||
}
|
||||
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 17) {
|
||||
|
|
|
@ -17,21 +17,21 @@
|
|||
namespace nix {
|
||||
|
||||
|
||||
bool Store::isInStore(const Path & path) const
|
||||
bool Store::isInStore(PathView path) const
|
||||
{
|
||||
return isInDir(path, storeDir);
|
||||
}
|
||||
|
||||
|
||||
std::pair<StorePath, Path> Store::toStorePath(const Path & path) const
|
||||
std::pair<StorePath, Path> Store::toStorePath(PathView path) const
|
||||
{
|
||||
if (!isInStore(path))
|
||||
throw Error("path '%1%' is not in the Nix store", path);
|
||||
Path::size_type slash = path.find('/', storeDir.size() + 1);
|
||||
auto slash = path.find('/', storeDir.size() + 1);
|
||||
if (slash == Path::npos)
|
||||
return {parseStorePath(path), ""};
|
||||
else
|
||||
return {parseStorePath(std::string_view(path).substr(0, slash)), path.substr(slash)};
|
||||
return {parseStorePath(path.substr(0, slash)), (Path) path.substr(slash)};
|
||||
}
|
||||
|
||||
|
||||
|
@ -220,13 +220,17 @@ StorePath Store::makeTextPath(std::string_view name, const Hash & hash,
|
|||
}
|
||||
|
||||
|
||||
std::pair<StorePath, Hash> Store::computeStorePathForPath(std::string_view name,
|
||||
const Path & srcPath, FileIngestionMethod method, HashType hashAlgo, PathFilter & filter) const
|
||||
std::pair<StorePath, Hash> Store::computeStorePathFromDump(
|
||||
Source & dump,
|
||||
std::string_view name,
|
||||
FileIngestionMethod method,
|
||||
HashType hashAlgo,
|
||||
const StorePathSet & references) const
|
||||
{
|
||||
Hash h = method == FileIngestionMethod::Recursive
|
||||
? hashPath(hashAlgo, srcPath, filter).first
|
||||
: hashFile(hashAlgo, srcPath);
|
||||
return std::make_pair(makeFixedOutputPath(method, h, name), h);
|
||||
HashSink sink(hashAlgo);
|
||||
dump.drainInto(sink);
|
||||
auto hash = sink.finish().first;
|
||||
return {makeFixedOutputPath(method, hash, name, references), hash};
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -179,7 +179,7 @@ public:

/* Return true if ‘path’ is in the Nix store (but not the Nix
store itself). */
bool isInStore(const Path & path) const;
bool isInStore(PathView path) const;

/* Return true if ‘path’ is a store path, i.e. a direct child of
the Nix store. */

@ -187,7 +187,7 @@ public:

/* Split a path like /nix/store/<hash>-<name>/<bla> into
/nix/store/<hash>-<name> and /<bla>. */
std::pair<StorePath, Path> toStorePath(const Path & path) const;
std::pair<StorePath, Path> toStorePath(PathView path) const;

/* Follow symlinks until we end up with a path in the Nix store. */
Path followLinksToStore(std::string_view path) const;

@ -217,12 +217,14 @@ public:
const StorePathSet & references = {},
bool hasSelfReference = false) const;

/* This is the preparatory part of addToStore(); it computes the
store path to which srcPath is to be copied. Returns the store
path and the cryptographic hash of the contents of srcPath. */
std::pair<StorePath, Hash> computeStorePathForPath(std::string_view name,
const Path & srcPath, FileIngestionMethod method = FileIngestionMethod::Recursive,
HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter) const;
/* Read-only variant of addToStoreFromDump(). It returns the store
path to which a NAR or flat file would be written. */
std::pair<StorePath, Hash> computeStorePathFromDump(
Source & dump,
std::string_view name,
FileIngestionMethod method = FileIngestionMethod::Recursive,
HashType hashAlgo = htSHA256,
const StorePathSet & references = {}) const;

/* Preparatory part of addTextToStore().
@ -35,10 +35,6 @@ static ArchiveSettings archiveSettings;

static GlobalConfig::Register rArchiveSettings(&archiveSettings);

const std::string narVersionMagic1 = "nix-archive-1";

static std::string caseHackSuffix = "~nix~case~hack~";

PathFilter defaultPathFilter = [](const Path &) { return true; };

@ -103,7 +103,9 @@ void copyNAR(Source & source, Sink & sink);
void copyPath(const Path & from, const Path & to);

extern const std::string narVersionMagic1;
inline constexpr std::string_view narVersionMagic1 = "nix-archive-1";

inline constexpr std::string_view caseHackSuffix = "~nix~case~hack~";

}
103
src/libutil/canon-path.cc
Normal file
|
@ -0,0 +1,103 @@
|
|||
#include "canon-path.hh"
|
||||
#include "util.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
CanonPath CanonPath::root = CanonPath("/");
|
||||
|
||||
CanonPath::CanonPath(std::string_view raw)
|
||||
: path(absPath((Path) raw, "/"))
|
||||
{ }
|
||||
|
||||
CanonPath::CanonPath(std::string_view raw, const CanonPath & root)
|
||||
: path(absPath((Path) raw, root.abs()))
|
||||
{ }
|
||||
|
||||
std::optional<CanonPath> CanonPath::parent() const
|
||||
{
|
||||
if (isRoot()) return std::nullopt;
|
||||
return CanonPath(unchecked_t(), path.substr(0, std::max((size_t) 1, path.rfind('/'))));
|
||||
}
|
||||
|
||||
void CanonPath::pop()
|
||||
{
|
||||
assert(!isRoot());
|
||||
path.resize(std::max((size_t) 1, path.rfind('/')));
|
||||
}
|
||||
|
||||
bool CanonPath::isWithin(const CanonPath & parent) const
|
||||
{
|
||||
return !(
|
||||
path.size() < parent.path.size()
|
||||
|| path.substr(0, parent.path.size()) != parent.path
|
||||
|| (parent.path.size() > 1 && path.size() > parent.path.size()
|
||||
&& path[parent.path.size()] != '/'));
|
||||
}
|
||||
|
||||
CanonPath CanonPath::removePrefix(const CanonPath & prefix) const
|
||||
{
|
||||
assert(isWithin(prefix));
|
||||
if (prefix.isRoot()) return *this;
|
||||
if (path.size() == prefix.path.size()) return root;
|
||||
return CanonPath(unchecked_t(), path.substr(prefix.path.size()));
|
||||
}
|
||||
|
||||
void CanonPath::extend(const CanonPath & x)
|
||||
{
|
||||
if (x.isRoot()) return;
|
||||
if (isRoot())
|
||||
path += x.rel();
|
||||
else
|
||||
path += x.abs();
|
||||
}
|
||||
|
||||
CanonPath CanonPath::operator + (const CanonPath & x) const
|
||||
{
|
||||
auto res = *this;
|
||||
res.extend(x);
|
||||
return res;
|
||||
}
|
||||
|
||||
void CanonPath::push(std::string_view c)
|
||||
{
|
||||
assert(c.find('/') == c.npos);
|
||||
assert(c != "." && c != "..");
|
||||
if (!isRoot()) path += '/';
|
||||
path += c;
|
||||
}
|
||||
|
||||
CanonPath CanonPath::operator + (std::string_view c) const
|
||||
{
|
||||
auto res = *this;
|
||||
res.push(c);
|
||||
return res;
|
||||
}
|
||||
|
||||
bool CanonPath::isAllowed(const std::set<CanonPath> & allowed) const
|
||||
{
|
||||
/* Check if `this` is an exact match or the parent of an
|
||||
allowed path. */
|
||||
auto lb = allowed.lower_bound(*this);
|
||||
if (lb != allowed.end()) {
|
||||
if (lb->isWithin(*this))
|
||||
return true;
|
||||
}
|
||||
|
||||
/* Check if a parent of `this` is allowed. */
|
||||
auto path = *this;
|
||||
while (!path.isRoot()) {
|
||||
path.pop();
|
||||
if (allowed.count(path))
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
std::ostream & operator << (std::ostream & stream, const CanonPath & path)
|
||||
{
|
||||
stream << path.abs();
|
||||
return stream;
|
||||
}
|
||||
|
||||
}
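
Aside (illustrative, not part of this commit): a minimal sketch of how the `CanonPath` type implemented above might be used, based only on the interface and behaviour shown in this diff. The concrete literals are invented for the example.

```c++
// Illustrative only -- assumes the canon-path.hh interface added in this commit.
#include "canon-path.hh"

#include <cassert>
#include <set>

int main()
{
    using nix::CanonPath;

    // Construction canonicalises the input: '.', '..' and empty
    // components are resolved, so this spelling denotes "/foo/baz".
    CanonPath p("foo//./bar/../baz");
    assert(p.abs() == "/foo/baz");

    // isAllowed() accepts a path that is inside an allowed path, or that
    // is a parent of one (so the parents of allowed paths stay visible).
    std::set<CanonPath> allowed{CanonPath("foo/baz"), CanonPath("x/y")};
    assert(p.isAllowed(allowed));                  // exact match
    assert((p + "sub").isAllowed(allowed));        // inside an allowed path
    assert(CanonPath("x").isAllowed(allowed));     // parent of an allowed path
    assert(!CanonPath("other").isAllowed(allowed));
    return 0;
}
```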
|
165
src/libutil/canon-path.hh
Normal file
|
@ -0,0 +1,165 @@
|
|||
#pragma once
|
||||
|
||||
#include <string>
|
||||
#include <optional>
|
||||
#include <cassert>
|
||||
#include <iostream>
|
||||
#include <set>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* A canonical representation of a path. It ensures the following:
|
||||
|
||||
- It always starts with a slash.
|
||||
|
||||
- It never ends with a slash, except if the path is "/".
|
||||
|
||||
- A slash is never followed by a slash (i.e. no empty components).
|
||||
|
||||
- There are no components equal to '.' or '..'.
|
||||
|
||||
Note that the path does not need to correspond to an actually
|
||||
existing path, and there is no guarantee that symlinks are
|
||||
resolved.
|
||||
*/
|
||||
class CanonPath
|
||||
{
|
||||
std::string path;
|
||||
|
||||
public:
|
||||
|
||||
/* Construct a canon path from a non-canonical path. Any '.', '..'
|
||||
or empty components are removed. */
|
||||
CanonPath(std::string_view raw);
|
||||
|
||||
explicit CanonPath(const char * raw)
|
||||
: CanonPath(std::string_view(raw))
|
||||
{ }
|
||||
|
||||
struct unchecked_t { };
|
||||
|
||||
CanonPath(unchecked_t _, std::string path)
|
||||
: path(std::move(path))
|
||||
{ }
|
||||
|
||||
static CanonPath root;
|
||||
|
||||
/* If `raw` starts with a slash, return
|
||||
`CanonPath(raw)`. Otherwise return a `CanonPath` representing
|
||||
`root + "/" + raw`. */
|
||||
CanonPath(std::string_view raw, const CanonPath & root);
|
||||
|
||||
bool isRoot() const
|
||||
{ return path.size() <= 1; }
|
||||
|
||||
explicit operator std::string_view() const
|
||||
{ return path; }
|
||||
|
||||
const std::string & abs() const
|
||||
{ return path; }
|
||||
|
||||
const char * c_str() const
|
||||
{ return path.c_str(); }
|
||||
|
||||
std::string_view rel() const
|
||||
{ return ((std::string_view) path).substr(1); }
|
||||
|
||||
struct Iterator
|
||||
{
|
||||
std::string_view remaining;
|
||||
size_t slash;
|
||||
|
||||
Iterator(std::string_view remaining)
|
||||
: remaining(remaining)
|
||||
, slash(remaining.find('/'))
|
||||
{ }
|
||||
|
||||
bool operator != (const Iterator & x) const
|
||||
{ return remaining.data() != x.remaining.data(); }
|
||||
|
||||
const std::string_view operator * () const
|
||||
{ return remaining.substr(0, slash); }
|
||||
|
||||
void operator ++ ()
|
||||
{
|
||||
if (slash == remaining.npos)
|
||||
remaining = remaining.substr(remaining.size());
|
||||
else {
|
||||
remaining = remaining.substr(slash + 1);
|
||||
slash = remaining.find('/');
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Iterator begin() const { return Iterator(rel()); }
|
||||
Iterator end() const { return Iterator(rel().substr(path.size() - 1)); }
|
||||
|
||||
std::optional<CanonPath> parent() const;
|
||||
|
||||
/* Remove the last component. Panics if this path is the root. */
|
||||
void pop();
|
||||
|
||||
std::optional<std::string_view> dirOf() const
|
||||
{
|
||||
if (isRoot()) return std::nullopt;
|
||||
return path.substr(0, path.rfind('/'));
|
||||
}
|
||||
|
||||
std::optional<std::string_view> baseName() const
|
||||
{
|
||||
if (isRoot()) return std::nullopt;
|
||||
return ((std::string_view) path).substr(path.rfind('/') + 1);
|
||||
}
|
||||
|
||||
bool operator == (const CanonPath & x) const
|
||||
{ return path == x.path; }
|
||||
|
||||
bool operator != (const CanonPath & x) const
|
||||
{ return path != x.path; }
|
||||
|
||||
/* Compare paths lexicographically except that path separators
|
||||
are sorted before any other character. That is, in the sorted order
|
||||
a directory is always followed directly by its children. For
|
||||
instance, 'foo' < 'foo/bar' < 'foo!'. */
|
||||
bool operator < (const CanonPath & x) const
|
||||
{
|
||||
auto i = path.begin();
|
||||
auto j = x.path.begin();
|
||||
for ( ; i != path.end() && j != x.path.end(); ++i, ++j) {
|
||||
auto c_i = *i;
|
||||
if (c_i == '/') c_i = 0;
|
||||
auto c_j = *j;
|
||||
if (c_j == '/') c_j = 0;
|
||||
if (c_i < c_j) return true;
|
||||
if (c_i > c_j) return false;
|
||||
}
|
||||
return i == path.end() && j != x.path.end();
|
||||
}
|
||||
|
||||
/* Return true if `this` is equal to `parent` or a child of
|
||||
`parent`. */
|
||||
bool isWithin(const CanonPath & parent) const;
|
||||
|
||||
CanonPath removePrefix(const CanonPath & prefix) const;
|
||||
|
||||
/* Append another path to this one. */
|
||||
void extend(const CanonPath & x);
|
||||
|
||||
/* Concatenate two paths. */
|
||||
CanonPath operator + (const CanonPath & x) const;
|
||||
|
||||
/* Add a path component to this one. It must not contain any slashes. */
|
||||
void push(std::string_view c);
|
||||
|
||||
CanonPath operator + (std::string_view c) const;
|
||||
|
||||
/* Check whether access to this path is allowed, which is the case
|
||||
if 1) `this` is within any of the `allowed` paths; or 2) any of
|
||||
the `allowed` paths are within `this`. (The latter condition
|
||||
ensures access to the parents of allowed paths.) */
|
||||
bool isAllowed(const std::set<CanonPath> & allowed) const;
|
||||
};
|
||||
|
||||
std::ostream & operator << (std::ostream & stream, const CanonPath & path);
|
||||
|
||||
}
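
Aside (illustrative, not part of this commit): the component iterator declared above can be used directly in a range-based for loop; compare the `CanonPath` unit tests added later in this commit.

```c++
// Illustrative only -- shows how CanonPath's Iterator behaves.
#include "canon-path.hh"

#include <iostream>

void printComponents(const nix::CanonPath & p)
{
    // Iteration yields the non-empty components, without separators:
    // for "/a/foo/bar" this prints "a", "foo", "bar"; for "/" nothing.
    for (auto component : p)
        std::cout << component << "\n";
}
```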
|
|
@ -9,9 +9,9 @@ namespace nix {
|
|||
|
||||
const std::string nativeSystem = SYSTEM;
|
||||
|
||||
void BaseError::addTrace(std::optional<ErrPos> e, hintformat hint)
|
||||
void BaseError::addTrace(std::shared_ptr<AbstractPos> && e, hintformat hint)
|
||||
{
|
||||
err.traces.push_front(Trace { .pos = e, .hint = hint });
|
||||
err.traces.push_front(Trace { .pos = std::move(e), .hint = hint });
|
||||
}
|
||||
|
||||
// c++ std::exception descendants must have a 'const char* what()' function.
|
||||
|
@ -30,91 +30,46 @@ const std::string & BaseError::calcWhat() const
|
|||
|
||||
std::optional<std::string> ErrorInfo::programName = std::nullopt;
|
||||
|
||||
std::ostream & operator<<(std::ostream & os, const hintformat & hf)
|
||||
std::ostream & operator <<(std::ostream & os, const hintformat & hf)
|
||||
{
|
||||
return os << hf.str();
|
||||
}
|
||||
|
||||
std::string showErrPos(const ErrPos & errPos)
|
||||
std::ostream & operator <<(std::ostream & str, const AbstractPos & pos)
|
||||
{
|
||||
if (errPos.line > 0) {
|
||||
if (errPos.column > 0) {
|
||||
return fmt("%d:%d", errPos.line, errPos.column);
|
||||
} else {
|
||||
return fmt("%d", errPos.line);
|
||||
}
|
||||
}
|
||||
else {
|
||||
return "";
|
||||
}
|
||||
pos.print(str);
|
||||
str << ":" << pos.line;
|
||||
if (pos.column > 0)
|
||||
str << ":" << pos.column;
|
||||
return str;
|
||||
}
|
||||
|
||||
std::optional<LinesOfCode> getCodeLines(const ErrPos & errPos)
|
||||
std::optional<LinesOfCode> AbstractPos::getCodeLines() const
|
||||
{
|
||||
if (errPos.line <= 0)
|
||||
if (line == 0)
|
||||
return std::nullopt;
|
||||
|
||||
if (errPos.origin == foFile) {
|
||||
LinesOfCode loc;
|
||||
try {
|
||||
// FIXME: when running as the daemon, make sure we don't
|
||||
// open a file to which the client doesn't have access.
|
||||
AutoCloseFD fd = open(errPos.file.c_str(), O_RDONLY | O_CLOEXEC);
|
||||
if (!fd) return {};
|
||||
if (auto source = getSource()) {
|
||||
|
||||
// count the newlines.
|
||||
int count = 0;
|
||||
std::string line;
|
||||
int pl = errPos.line - 1;
|
||||
do
|
||||
{
|
||||
line = readLine(fd.get());
|
||||
++count;
|
||||
if (count < pl)
|
||||
;
|
||||
else if (count == pl)
|
||||
loc.prevLineOfCode = line;
|
||||
else if (count == pl + 1)
|
||||
loc.errLineOfCode = line;
|
||||
else if (count == pl + 2) {
|
||||
loc.nextLineOfCode = line;
|
||||
break;
|
||||
}
|
||||
} while (true);
|
||||
return loc;
|
||||
}
|
||||
catch (EndOfFile & eof) {
|
||||
if (loc.errLineOfCode.has_value())
|
||||
return loc;
|
||||
else
|
||||
return std::nullopt;
|
||||
}
|
||||
catch (std::exception & e) {
|
||||
return std::nullopt;
|
||||
}
|
||||
} else {
|
||||
std::istringstream iss(errPos.file);
|
||||
std::istringstream iss(*source);
|
||||
// count the newlines.
|
||||
int count = 0;
|
||||
std::string line;
|
||||
int pl = errPos.line - 1;
|
||||
std::string curLine;
|
||||
int pl = line - 1;
|
||||
|
||||
LinesOfCode loc;
|
||||
|
||||
do
|
||||
{
|
||||
std::getline(iss, line);
|
||||
do {
|
||||
std::getline(iss, curLine);
|
||||
++count;
|
||||
if (count < pl)
|
||||
{
|
||||
;
|
||||
}
|
||||
else if (count == pl) {
|
||||
loc.prevLineOfCode = line;
|
||||
loc.prevLineOfCode = curLine;
|
||||
} else if (count == pl + 1) {
|
||||
loc.errLineOfCode = line;
|
||||
loc.errLineOfCode = curLine;
|
||||
} else if (count == pl + 2) {
|
||||
loc.nextLineOfCode = line;
|
||||
loc.nextLineOfCode = curLine;
|
||||
break;
|
||||
}
|
||||
|
||||
|
@ -124,12 +79,14 @@ std::optional<LinesOfCode> getCodeLines(const ErrPos & errPos)
|
|||
|
||||
return loc;
|
||||
}
|
||||
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
// print lines of code to the ostream, indicating the error column.
|
||||
void printCodeLines(std::ostream & out,
|
||||
const std::string & prefix,
|
||||
const ErrPos & errPos,
|
||||
const AbstractPos & errPos,
|
||||
const LinesOfCode & loc)
|
||||
{
|
||||
// previous line of code.
|
||||
|
@ -176,28 +133,6 @@ void printCodeLines(std::ostream & out,
|
|||
}
|
||||
}
|
||||
|
||||
void printAtPos(const ErrPos & pos, std::ostream & out)
|
||||
{
|
||||
if (pos) {
|
||||
switch (pos.origin) {
|
||||
case foFile: {
|
||||
out << fmt(ANSI_BLUE "at " ANSI_WARNING "%s:%s" ANSI_NORMAL ":", pos.file, showErrPos(pos));
|
||||
break;
|
||||
}
|
||||
case foString: {
|
||||
out << fmt(ANSI_BLUE "at " ANSI_WARNING "«string»:%s" ANSI_NORMAL ":", showErrPos(pos));
|
||||
break;
|
||||
}
|
||||
case foStdin: {
|
||||
out << fmt(ANSI_BLUE "at " ANSI_WARNING "«stdin»:%s" ANSI_NORMAL ":", showErrPos(pos));
|
||||
break;
|
||||
}
|
||||
default:
|
||||
throw Error("invalid FileOrigin in errPos");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static std::string indent(std::string_view indentFirst, std::string_view indentRest, std::string_view s)
|
||||
{
|
||||
std::string res;
|
||||
|
@ -264,18 +199,17 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
|
|||
std::ostringstream oss;
|
||||
oss << einfo.msg << "\n";
|
||||
|
||||
if (einfo.errPos.has_value() && *einfo.errPos) {
|
||||
oss << "\n";
|
||||
printAtPos(*einfo.errPos, oss);
|
||||
auto noSource = ANSI_ITALIC " (source not available)" ANSI_NORMAL "\n";
|
||||
|
||||
auto loc = getCodeLines(*einfo.errPos);
|
||||
if (einfo.errPos) {
|
||||
oss << "\n" << ANSI_BLUE << "at " ANSI_WARNING << *einfo.errPos << ANSI_NORMAL << ":";
|
||||
|
||||
// lines of code.
|
||||
if (loc.has_value()) {
|
||||
if (auto loc = einfo.errPos->getCodeLines()) {
|
||||
oss << "\n";
|
||||
printCodeLines(oss, "", *einfo.errPos, *loc);
|
||||
oss << "\n";
|
||||
}
|
||||
} else
|
||||
oss << noSource;
|
||||
}
|
||||
|
||||
auto suggestions = einfo.suggestions.trim();
|
||||
|
@ -290,17 +224,15 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
|
|||
for (auto iter = einfo.traces.rbegin(); iter != einfo.traces.rend(); ++iter) {
|
||||
oss << "\n" << "… " << iter->hint.str() << "\n";
|
||||
|
||||
if (iter->pos.has_value() && (*iter->pos)) {
|
||||
auto pos = iter->pos.value();
|
||||
oss << "\n";
|
||||
printAtPos(pos, oss);
|
||||
if (iter->pos) {
|
||||
oss << "\n" << ANSI_BLUE << "at " ANSI_WARNING << *iter->pos << ANSI_NORMAL << ":";
|
||||
|
||||
auto loc = getCodeLines(pos);
|
||||
if (loc.has_value()) {
|
||||
if (auto loc = iter->pos->getCodeLines()) {
|
||||
oss << "\n";
|
||||
printCodeLines(oss, "", pos, *loc);
|
||||
printCodeLines(oss, "", *iter->pos, *loc);
|
||||
oss << "\n";
|
||||
}
|
||||
} else
|
||||
oss << noSource;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -54,13 +54,6 @@ typedef enum {
lvlVomit
} Verbosity;

/* adjust Pos::origin bit width when adding stuff here */
typedef enum {
foFile,
foStdin,
foString
} FileOrigin;

// the lines of code surrounding an error.
struct LinesOfCode {
std::optional<std::string> prevLineOfCode;

@ -68,54 +61,37 @@ struct LinesOfCode {
std::optional<std::string> nextLineOfCode;
};

// ErrPos indicates the location of an error in a nix file.
struct ErrPos {
int line = 0;
int column = 0;
std::string file;
FileOrigin origin;
/* An abstract type that represents a location in a source file. */
struct AbstractPos
{
uint32_t line = 0;
uint32_t column = 0;

operator bool() const
{
return line != 0;
}
/* Return the contents of the source file. */
virtual std::optional<std::string> getSource() const
{ return std::nullopt; };

// convert from the Pos struct, found in libexpr.
template <class P>
ErrPos & operator=(const P & pos)
{
origin = pos.origin;
line = pos.line;
column = pos.column;
file = pos.file;
return *this;
}
virtual void print(std::ostream & out) const = 0;

template <class P>
ErrPos(const P & p)
{
*this = p;
}
std::optional<LinesOfCode> getCodeLines() const;
};

std::optional<LinesOfCode> getCodeLines(const ErrPos & errPos);
std::ostream & operator << (std::ostream & str, const AbstractPos & pos);

void printCodeLines(std::ostream & out,
const std::string & prefix,
const ErrPos & errPos,
const AbstractPos & errPos,
const LinesOfCode & loc);

void printAtPos(const ErrPos & pos, std::ostream & out);

struct Trace {
std::optional<ErrPos> pos;
std::shared_ptr<AbstractPos> pos;
hintformat hint;
};

struct ErrorInfo {
Verbosity level;
hintformat msg;
std::optional<ErrPos> errPos;
std::shared_ptr<AbstractPos> errPos;
std::list<Trace> traces;

Suggestions suggestions;

@ -177,12 +153,12 @@ public:
const ErrorInfo & info() const { calcWhat(); return err; }

template<typename... Args>
void addTrace(std::optional<ErrPos> e, const std::string & fs, const Args & ... args)
void addTrace(std::shared_ptr<AbstractPos> && e, const std::string & fs, const Args & ... args)
{
addTrace(e, hintfmt(fs, args...));
addTrace(std::move(e), hintfmt(fs, args...));
}

void addTrace(std::optional<ErrPos> e, hintformat hint);
void addTrace(std::shared_ptr<AbstractPos> && e, hintformat hint);

bool hasTrace() const { return !err.traces.empty(); }
};
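
Aside (hypothetical, not part of this commit): a concrete position type only has to implement `print()` (and, if it wants code excerpts, `getSource()`) from the `AbstractPos` interface introduced above. `StringSourcePos` and `addContext` below are invented names used purely to show the contract.

```c++
// Hypothetical example -- demonstrates the AbstractPos contract only.
#include "error.hh"

#include <memory>
#include <optional>
#include <ostream>
#include <string>

struct StringSourcePos : nix::AbstractPos
{
    std::string source; // the text that the line/column fields refer to

    // Letting getCodeLines() pull surrounding lines from the in-memory text.
    std::optional<std::string> getSource() const override
    { return source; }

    void print(std::ostream & out) const override
    { out << "«string»"; }
};

// Positions are now passed around as shared_ptr<AbstractPos>, e.g. when
// attaching a trace to an error.
void addContext(nix::BaseError & e, std::shared_ptr<StringSourcePos> pos)
{
    e.addTrace(std::move(pos), "while doing something");
}
```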
|
||||
|
|
|
@ -148,7 +148,7 @@ inline hintformat hintfmt(const std::string & fs, const Args & ... args)
return f;
}

inline hintformat hintfmt(std::string plain_string)
inline hintformat hintfmt(const std::string & plain_string)
{
// we won't be receiving any args in this case, so just print the original string
return hintfmt("%s", normaltxt(plain_string));
|
||||
|
|
|
@ -105,14 +105,6 @@ public:
|
|||
|
||||
Verbosity verbosity = lvlInfo;
|
||||
|
||||
void warnOnce(bool & haveWarned, const FormatOrString & fs)
|
||||
{
|
||||
if (!haveWarned) {
|
||||
warn(fs.s);
|
||||
haveWarned = true;
|
||||
}
|
||||
}
|
||||
|
||||
void writeToStderr(std::string_view s)
|
||||
{
|
||||
try {
|
||||
|
@ -130,11 +122,11 @@ Logger * makeSimpleLogger(bool printBuildLogs)
|
|||
return new SimpleLogger(printBuildLogs);
|
||||
}
|
||||
|
||||
std::atomic<uint64_t> nextId{(uint64_t) getpid() << 32};
|
||||
std::atomic<uint64_t> nextId{0};
|
||||
|
||||
Activity::Activity(Logger & logger, Verbosity lvl, ActivityType type,
|
||||
const std::string & s, const Logger::Fields & fields, ActivityId parent)
|
||||
: logger(logger), id(nextId++)
|
||||
: logger(logger), id(nextId++ + (((uint64_t) getpid()) << 32))
|
||||
{
|
||||
logger.startActivity(id, lvl, type, s, fields, parent);
|
||||
}
|
||||
|
@ -186,10 +178,11 @@ struct JSONLogger : Logger {
|
|||
json["msg"] = oss.str();
|
||||
json["raw_msg"] = ei.msg.str();
|
||||
|
||||
if (ei.errPos.has_value() && (*ei.errPos)) {
|
||||
if (ei.errPos) {
|
||||
json["line"] = ei.errPos->line;
|
||||
json["column"] = ei.errPos->column;
|
||||
json["file"] = ei.errPos->file;
|
||||
//json["file"] = ei.errPos->file;
|
||||
json["file"] = nullptr;
|
||||
} else {
|
||||
json["line"] = nullptr;
|
||||
json["column"] = nullptr;
|
||||
|
@ -201,10 +194,11 @@ struct JSONLogger : Logger {
|
|||
for (auto iter = ei.traces.rbegin(); iter != ei.traces.rend(); ++iter) {
|
||||
nlohmann::json stackFrame;
|
||||
stackFrame["raw_msg"] = iter->hint.str();
|
||||
if (iter->pos.has_value() && (*iter->pos)) {
|
||||
if (iter->pos) {
|
||||
stackFrame["line"] = iter->pos->line;
|
||||
stackFrame["column"] = iter->pos->column;
|
||||
stackFrame["file"] = iter->pos->file;
|
||||
//stackFrame["file"] = iter->pos->file;
|
||||
stackFrame["file"] = nullptr;
|
||||
}
|
||||
traces.push_back(stackFrame);
|
||||
}
|
||||
|
|
|
@ -82,7 +82,7 @@ public:
|
|||
log(lvlInfo, fs);
|
||||
}
|
||||
|
||||
virtual void logEI(const ErrorInfo &ei) = 0;
|
||||
virtual void logEI(const ErrorInfo & ei) = 0;
|
||||
|
||||
void logEI(Verbosity lvl, ErrorInfo ei)
|
||||
{
|
||||
|
@ -225,7 +225,11 @@ inline void warn(const std::string & fs, const Args & ... args)
|
|||
logger->warn(f.str());
|
||||
}
|
||||
|
||||
void warnOnce(bool & haveWarned, const FormatOrString & fs);
|
||||
#define warnOnce(haveWarned, args...) \
|
||||
if (!haveWarned) { \
|
||||
haveWarned = true; \
|
||||
warn(args); \
|
||||
}
|
||||
|
||||
void writeToStderr(std::string_view s);
|
||||
|
||||
|
|
|
@ -83,6 +83,11 @@ public:
|
|||
return p != other.p;
|
||||
}
|
||||
|
||||
bool operator < (const ref<T> & other) const
|
||||
{
|
||||
return p < other.p;
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
template<typename T2, typename... Args>
|
||||
|
|
|
@ -338,7 +338,7 @@ Sink & operator << (Sink & sink, const StringSet & s)
|
|||
|
||||
Sink & operator << (Sink & sink, const Error & ex)
|
||||
{
|
||||
auto info = ex.info();
|
||||
auto & info = ex.info();
|
||||
sink
|
||||
<< "Error"
|
||||
<< info.level
|
||||
|
|
|
@ -331,17 +331,9 @@ T readNum(Source & source)
|
|||
unsigned char buf[8];
|
||||
source((char *) buf, sizeof(buf));
|
||||
|
||||
uint64_t n =
|
||||
((uint64_t) buf[0]) |
|
||||
((uint64_t) buf[1] << 8) |
|
||||
((uint64_t) buf[2] << 16) |
|
||||
((uint64_t) buf[3] << 24) |
|
||||
((uint64_t) buf[4] << 32) |
|
||||
((uint64_t) buf[5] << 40) |
|
||||
((uint64_t) buf[6] << 48) |
|
||||
((uint64_t) buf[7] << 56);
|
||||
auto n = readLittleEndian<uint64_t>(buf);
|
||||
|
||||
if (n > (uint64_t)std::numeric_limits<T>::max())
|
||||
if (n > (uint64_t) std::numeric_limits<T>::max())
|
||||
throw SerialisationError("serialised integer %d is too large for type '%s'", n, typeid(T).name());
|
||||
|
||||
return (T) n;
|
||||
|
|
155
src/libutil/tests/canon-path.cc
Normal file
|
@ -0,0 +1,155 @@
|
|||
#include "canon-path.hh"
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
TEST(CanonPath, basic) {
|
||||
{
|
||||
CanonPath p("/");
|
||||
ASSERT_EQ(p.abs(), "/");
|
||||
ASSERT_EQ(p.rel(), "");
|
||||
ASSERT_EQ(p.baseName(), std::nullopt);
|
||||
ASSERT_EQ(p.dirOf(), std::nullopt);
|
||||
ASSERT_FALSE(p.parent());
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p("/foo//");
|
||||
ASSERT_EQ(p.abs(), "/foo");
|
||||
ASSERT_EQ(p.rel(), "foo");
|
||||
ASSERT_EQ(*p.baseName(), "foo");
|
||||
ASSERT_EQ(*p.dirOf(), ""); // FIXME: do we want this?
|
||||
ASSERT_EQ(p.parent()->abs(), "/");
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p("foo/bar");
|
||||
ASSERT_EQ(p.abs(), "/foo/bar");
|
||||
ASSERT_EQ(p.rel(), "foo/bar");
|
||||
ASSERT_EQ(*p.baseName(), "bar");
|
||||
ASSERT_EQ(*p.dirOf(), "/foo");
|
||||
ASSERT_EQ(p.parent()->abs(), "/foo");
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p("foo//bar/");
|
||||
ASSERT_EQ(p.abs(), "/foo/bar");
|
||||
ASSERT_EQ(p.rel(), "foo/bar");
|
||||
ASSERT_EQ(*p.baseName(), "bar");
|
||||
ASSERT_EQ(*p.dirOf(), "/foo");
|
||||
}
|
||||
}
|
||||
|
||||
TEST(CanonPath, pop) {
|
||||
CanonPath p("foo/bar/x");
|
||||
ASSERT_EQ(p.abs(), "/foo/bar/x");
|
||||
p.pop();
|
||||
ASSERT_EQ(p.abs(), "/foo/bar");
|
||||
p.pop();
|
||||
ASSERT_EQ(p.abs(), "/foo");
|
||||
p.pop();
|
||||
ASSERT_EQ(p.abs(), "/");
|
||||
}
|
||||
|
||||
TEST(CanonPath, removePrefix) {
|
||||
CanonPath p1("foo/bar");
|
||||
CanonPath p2("foo/bar/a/b/c");
|
||||
ASSERT_EQ(p2.removePrefix(p1).abs(), "/a/b/c");
|
||||
ASSERT_EQ(p1.removePrefix(p1).abs(), "/");
|
||||
ASSERT_EQ(p1.removePrefix(CanonPath("/")).abs(), "/foo/bar");
|
||||
}
|
||||
|
||||
TEST(CanonPath, iter) {
|
||||
{
|
||||
CanonPath p("a//foo/bar//");
|
||||
std::vector<std::string_view> ss;
|
||||
for (auto & c : p) ss.push_back(c);
|
||||
ASSERT_EQ(ss, std::vector<std::string_view>({"a", "foo", "bar"}));
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p("/");
|
||||
std::vector<std::string_view> ss;
|
||||
for (auto & c : p) ss.push_back(c);
|
||||
ASSERT_EQ(ss, std::vector<std::string_view>());
|
||||
}
|
||||
}
|
||||
|
||||
TEST(CanonPath, concat) {
|
||||
{
|
||||
CanonPath p1("a//foo/bar//");
|
||||
CanonPath p2("xyzzy/bla");
|
||||
ASSERT_EQ((p1 + p2).abs(), "/a/foo/bar/xyzzy/bla");
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p1("/");
|
||||
CanonPath p2("/a/b");
|
||||
ASSERT_EQ((p1 + p2).abs(), "/a/b");
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p1("/a/b");
|
||||
CanonPath p2("/");
|
||||
ASSERT_EQ((p1 + p2).abs(), "/a/b");
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p("/foo/bar");
|
||||
ASSERT_EQ((p + "x").abs(), "/foo/bar/x");
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p("/");
|
||||
ASSERT_EQ((p + "foo" + "bar").abs(), "/foo/bar");
|
||||
}
|
||||
}
|
||||
|
||||
TEST(CanonPath, within) {
|
||||
{
|
||||
ASSERT_TRUE(CanonPath("foo").isWithin(CanonPath("foo")));
|
||||
ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("bar")));
|
||||
ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("fo")));
|
||||
ASSERT_TRUE(CanonPath("foo/bar").isWithin(CanonPath("foo")));
|
||||
ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("foo/bar")));
|
||||
ASSERT_TRUE(CanonPath("/foo/bar/default.nix").isWithin(CanonPath("/")));
|
||||
ASSERT_TRUE(CanonPath("/").isWithin(CanonPath("/")));
|
||||
}
|
||||
}
|
||||
|
||||
TEST(CanonPath, sort) {
|
||||
ASSERT_FALSE(CanonPath("foo") < CanonPath("foo"));
|
||||
ASSERT_TRUE (CanonPath("foo") < CanonPath("foo/bar"));
|
||||
ASSERT_TRUE (CanonPath("foo/bar") < CanonPath("foo!"));
|
||||
ASSERT_FALSE(CanonPath("foo!") < CanonPath("foo"));
|
||||
ASSERT_TRUE (CanonPath("foo") < CanonPath("foo!"));
|
||||
}
|
||||
|
||||
TEST(CanonPath, allowed) {
|
||||
{
|
||||
std::set<CanonPath> allowed {
|
||||
CanonPath("foo/bar"),
|
||||
CanonPath("foo!"),
|
||||
CanonPath("xyzzy"),
|
||||
CanonPath("a/b/c"),
|
||||
};
|
||||
|
||||
ASSERT_TRUE (CanonPath("foo/bar").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("foo/bar/bla").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("foo").isAllowed(allowed));
|
||||
ASSERT_FALSE(CanonPath("bar").isAllowed(allowed));
|
||||
ASSERT_FALSE(CanonPath("bar/a").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("a").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("a/b").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("a/b/c").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("a/b/c/d").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("a/b/c/d/e").isAllowed(allowed));
|
||||
ASSERT_FALSE(CanonPath("a/b/a").isAllowed(allowed));
|
||||
ASSERT_FALSE(CanonPath("a/b/d").isAllowed(allowed));
|
||||
ASSERT_FALSE(CanonPath("aaa").isAllowed(allowed));
|
||||
ASSERT_FALSE(CanonPath("zzz").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("/").isAllowed(allowed));
|
||||
}
|
||||
}
|
||||
}
|
|
@ -311,6 +311,42 @@ namespace nix {
|
|||
ASSERT_THROW(base64Decode("cXVvZCBlcm_0IGRlbW9uc3RyYW5kdW0="), Error);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* getLine
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(getLine, all) {
|
||||
{
|
||||
auto [line, rest] = getLine("foo\nbar\nxyzzy");
|
||||
ASSERT_EQ(line, "foo");
|
||||
ASSERT_EQ(rest, "bar\nxyzzy");
|
||||
}
|
||||
|
||||
{
|
||||
auto [line, rest] = getLine("foo\r\nbar\r\nxyzzy");
|
||||
ASSERT_EQ(line, "foo");
|
||||
ASSERT_EQ(rest, "bar\r\nxyzzy");
|
||||
}
|
||||
|
||||
{
|
||||
auto [line, rest] = getLine("foo\n");
|
||||
ASSERT_EQ(line, "foo");
|
||||
ASSERT_EQ(rest, "");
|
||||
}
|
||||
|
||||
{
|
||||
auto [line, rest] = getLine("foo");
|
||||
ASSERT_EQ(line, "foo");
|
||||
ASSERT_EQ(rest, "");
|
||||
}
|
||||
|
||||
{
|
||||
auto [line, rest] = getLine("");
|
||||
ASSERT_EQ(line, "");
|
||||
ASSERT_EQ(rest, "");
|
||||
}
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* toLower
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
|
|
@ -1617,6 +1617,21 @@ std::string stripIndentation(std::string_view s)
}

std::pair<std::string_view, std::string_view> getLine(std::string_view s)
{
auto newline = s.find('\n');

if (newline == s.npos) {
return {s, ""};
} else {
auto line = s.substr(0, newline);
if (!line.empty() && line[line.size() - 1] == '\r')
line = line.substr(0, line.size() - 1);
return {line, s.substr(newline + 1)};
}
}

//////////////////////////////////////////////////////////////////////

static Sync<std::pair<unsigned short, unsigned short>> windowSize{{0, 0}};
|
||||
|
|
|
@ -510,6 +510,17 @@ std::optional<N> string2Float(const std::string_view s)
}

/* Convert a little-endian integer to host order. */
template<typename T>
T readLittleEndian(unsigned char * p)
{
T x = 0;
for (size_t i = 0; i < sizeof(x); ++i)
x |= ((T) *p++) << (i * 8);
return x;
}

/* Return true iff `s' starts with `prefix'. */
bool hasPrefix(std::string_view s, std::string_view prefix);

@ -563,6 +574,12 @@ std::string base64Decode(std::string_view s);
std::string stripIndentation(std::string_view s);

/* Get the prefix of 's' up to and excluding the next line break (LF
optionally preceded by CR), and the remainder following the line
break. */
std::pair<std::string_view, std::string_view> getLine(std::string_view s);

/* Get a value for the specified key from an associate container. */
template <class T>
const typename T::mapped_type * get(const T & map, const typename T::key_type & key)

@ -737,4 +754,11 @@ inline std::string operator + (std::string && s, std::string_view s2)
return std::move(s);
}

inline std::string operator + (std::string_view s1, const char * s2)
{
std::string s(s1);
s.append(s2);
return s;
}

}
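
Aside (illustrative, not part of this commit): the two helpers added to util.hh above are small enough to exercise in isolation; the byte values below are made up for the example.

```c++
// Illustrative only -- exercises readLittleEndian() and getLine() from util.hh.
#include "util.hh"

#include <cassert>
#include <cstdint>

int main()
{
    // readLittleEndian<T>() folds sizeof(T) bytes, least significant first.
    unsigned char buf[4] = {0x78, 0x56, 0x34, 0x12};
    assert(nix::readLittleEndian<uint32_t>(buf) == 0x12345678);

    // getLine() splits off the first line, swallowing an optional '\r'.
    auto [line, rest] = nix::getLine("foo\r\nbar");
    assert(line == "foo" && rest == "bar");
    return 0;
}
```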
|
||||
|
|
|
@ -289,7 +289,7 @@ static void main_nix_build(int argc, char * * argv)
|
|||
else
|
||||
for (auto i : left) {
|
||||
if (fromArgs)
|
||||
exprs.push_back(state->parseExprFromString(std::move(i), absPath(".")));
|
||||
exprs.push_back(state->parseExprFromString(std::move(i), state->rootPath(absPath("."))));
|
||||
else {
|
||||
auto absolute = i;
|
||||
try {
|
||||
|
@ -301,8 +301,11 @@ static void main_nix_build(int argc, char * * argv)
|
|||
else
|
||||
/* If we're in a #! script, interpret filenames
|
||||
relative to the script. */
|
||||
exprs.push_back(state->parseExprFromFile(resolveExprPath(state->checkSourcePath(lookupFileArg(*state,
|
||||
inShebang && !packages ? absPath(i, absPath(dirOf(script))) : i)))));
|
||||
exprs.push_back(
|
||||
state->parseExprFromFile(
|
||||
resolveExprPath(
|
||||
lookupFileArg(*state,
|
||||
inShebang && !packages ? absPath(i, absPath(dirOf(script))) : i))));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -385,7 +388,9 @@ static void main_nix_build(int argc, char * * argv)
|
|||
if (!shell) {
|
||||
|
||||
try {
|
||||
auto expr = state->parseExprFromString("(import <nixpkgs> {}).bashInteractive", absPath("."));
|
||||
auto expr = state->parseExprFromString(
|
||||
"(import <nixpkgs> {}).bashInteractive",
|
||||
state->rootPath(absPath(".")));
|
||||
|
||||
Value v;
|
||||
state->eval(expr, v);
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
#include "filetransfer.hh"
|
||||
#include "store-api.hh"
|
||||
#include "legacy.hh"
|
||||
#include "fetchers.hh"
|
||||
#include "tarball.hh"
|
||||
|
||||
#include <fcntl.h>
|
||||
#include <regex>
|
||||
|
|
|
@ -44,7 +44,7 @@ typedef enum {
|
|||
struct InstallSourceInfo
|
||||
{
|
||||
InstallSourceType type;
|
||||
Path nixExprPath; /* for srcNixExprDrvs, srcNixExprs */
|
||||
std::shared_ptr<SourcePath> nixExprPath; /* for srcNixExprDrvs, srcNixExprs */
|
||||
Path profile; /* for srcProfile */
|
||||
std::string systemFilter; /* for srcNixExprDrvs */
|
||||
Bindings * autoArgs;
|
||||
|
@ -92,9 +92,11 @@ static bool parseInstallSourceOptions(Globals & globals,
|
|||
}
|
||||
|
||||
|
||||
static bool isNixExpr(const Path & path, struct stat & st)
|
||||
static bool isNixExpr(const SourcePath & path, struct InputAccessor::Stat & st)
|
||||
{
|
||||
return S_ISREG(st.st_mode) || (S_ISDIR(st.st_mode) && pathExists(path + "/default.nix"));
|
||||
return
|
||||
st.type == InputAccessor::tRegular
|
||||
|| (st.type == InputAccessor::tDirectory && (path + "default.nix").pathExists());
|
||||
}
|
||||
|
||||
|
||||
|
@ -102,10 +104,10 @@ static constexpr size_t maxAttrs = 1024;
|
|||
|
||||
|
||||
static void getAllExprs(EvalState & state,
|
||||
const Path & path, StringSet & seen, BindingsBuilder & attrs)
|
||||
const SourcePath & path, StringSet & seen, BindingsBuilder & attrs)
|
||||
{
|
||||
StringSet namesSorted;
|
||||
for (auto & i : readDirectory(path)) namesSorted.insert(i.name);
|
||||
for (auto & [name, _] : path.readDirectory()) namesSorted.insert(name);
|
||||
|
||||
for (auto & i : namesSorted) {
|
||||
/* Ignore the manifest.nix used by profiles. This is
|
||||
|
@ -113,13 +115,16 @@ static void getAllExprs(EvalState & state,
|
|||
are implemented using profiles). */
|
||||
if (i == "manifest.nix") continue;
|
||||
|
||||
Path path2 = path + "/" + i;
|
||||
SourcePath path2 = path + i;
|
||||
|
||||
struct stat st;
|
||||
if (stat(path2.c_str(), &st) == -1)
|
||||
InputAccessor::Stat st;
|
||||
try {
|
||||
st = path2.resolveSymlinks().lstat();
|
||||
} catch (Error &) {
|
||||
continue; // ignore dangling symlinks in ~/.nix-defexpr
|
||||
}
|
||||
|
||||
if (isNixExpr(path2, st) && (!S_ISREG(st.st_mode) || hasSuffix(path2, ".nix"))) {
|
||||
if (isNixExpr(path2, st) && (st.type != InputAccessor::tRegular || hasSuffix(path2.baseName(), ".nix"))) {
|
||||
/* Strip off the `.nix' filename suffix (if applicable),
|
||||
otherwise the attribute cannot be selected with the
|
||||
`-A' option. Useful if you want to stick a Nix
|
||||
|
@ -129,21 +134,20 @@ static void getAllExprs(EvalState & state,
|
|||
attrName = std::string(attrName, 0, attrName.size() - 4);
|
||||
if (!seen.insert(attrName).second) {
|
||||
std::string suggestionMessage = "";
|
||||
if (path2.find("channels") != std::string::npos && path.find("channels") != std::string::npos) {
|
||||
if (path2.path.abs().find("channels") != std::string::npos && path.path.abs().find("channels") != std::string::npos)
|
||||
suggestionMessage = fmt("\nsuggestion: remove '%s' from either the root channels or the user channels", attrName);
|
||||
}
|
||||
printError("warning: name collision in input Nix expressions, skipping '%1%'"
|
||||
"%2%", path2, suggestionMessage);
|
||||
continue;
|
||||
}
|
||||
/* Load the expression on demand. */
|
||||
auto vArg = state.allocValue();
|
||||
vArg->mkString(path2);
|
||||
vArg->mkPath(path2);
|
||||
if (seen.size() == maxAttrs)
|
||||
throw Error("too many Nix expressions in directory '%1%'", path);
|
||||
attrs.alloc(attrName).mkApp(&state.getBuiltin("import"), vArg);
|
||||
}
|
||||
else if (S_ISDIR(st.st_mode))
|
||||
else if (st.type == InputAccessor::tDirectory)
|
||||
/* `path2' is a directory (with no default.nix in it);
|
||||
recurse into it. */
|
||||
getAllExprs(state, path2, seen, attrs);
|
||||
|
@ -152,11 +156,9 @@ static void getAllExprs(EvalState & state,
|
|||
|
||||
|
||||
|
||||
static void loadSourceExpr(EvalState & state, const Path & path, Value & v)
|
||||
static void loadSourceExpr(EvalState & state, const SourcePath & path, Value & v)
|
||||
{
|
||||
struct stat st;
|
||||
if (stat(path.c_str(), &st) == -1)
|
||||
throw SysError("getting information about '%1%'", path);
|
||||
auto st = path.resolveSymlinks().lstat();
|
||||
|
||||
if (isNixExpr(path, st))
|
||||
state.evalFile(path, v);
|
||||
|
@ -167,7 +169,7 @@ static void loadSourceExpr(EvalState & state, const Path & path, Value & v)
|
|||
set flat, not nested, to make it easier for a user to have a
|
||||
~/.nix-defexpr directory that includes some system-wide
|
||||
directory). */
|
||||
else if (S_ISDIR(st.st_mode)) {
|
||||
else if (st.type == InputAccessor::tDirectory) {
|
||||
auto attrs = state.buildBindings(maxAttrs);
|
||||
attrs.alloc("_combineChannels").mkList(0);
|
||||
StringSet seen;
|
||||
|
@ -179,7 +181,7 @@ static void loadSourceExpr(EvalState & state, const Path & path, Value & v)
|
|||
}
|
||||
|
||||
|
||||
static void loadDerivations(EvalState & state, Path nixExprPath,
|
||||
static void loadDerivations(EvalState & state, const SourcePath & nixExprPath,
|
||||
std::string systemFilter, Bindings & autoArgs,
|
||||
const std::string & pathPrefix, DrvInfos & elems)
|
||||
{
|
||||
|
@ -390,7 +392,7 @@ static void queryInstSources(EvalState & state,
|
|||
/* Load the derivations from the (default or specified)
|
||||
Nix expression. */
|
||||
DrvInfos allElems;
|
||||
loadDerivations(state, instSource.nixExprPath,
|
||||
loadDerivations(state, *instSource.nixExprPath,
|
||||
instSource.systemFilter, *instSource.autoArgs, "", allElems);
|
||||
|
||||
elems = filterBySelector(state, allElems, args, newestOnly);
|
||||
|
@ -407,10 +409,10 @@ static void queryInstSources(EvalState & state,
|
|||
case srcNixExprs: {
|
||||
|
||||
Value vArg;
|
||||
loadSourceExpr(state, instSource.nixExprPath, vArg);
|
||||
loadSourceExpr(state, *instSource.nixExprPath, vArg);
|
||||
|
||||
for (auto & i : args) {
|
||||
Expr * eFun = state.parseExprFromString(i, absPath("."));
|
||||
Expr * eFun = state.parseExprFromString(i, state.rootPath(absPath(".")));
|
||||
Value vFun, vTmp;
|
||||
state.eval(eFun, vFun);
|
||||
vTmp.mkApp(&vFun, &vArg);
|
||||
|
@ -462,7 +464,7 @@ static void queryInstSources(EvalState & state,
|
|||
|
||||
case srcAttrPath: {
|
||||
Value vRoot;
|
||||
loadSourceExpr(state, instSource.nixExprPath, vRoot);
|
||||
loadSourceExpr(state, *instSource.nixExprPath, vRoot);
|
||||
for (auto & i : args) {
|
||||
Value & v(*findAlongAttrPath(state, i, *instSource.autoArgs, vRoot).first);
|
||||
getDerivations(state, v, "", *instSource.autoArgs, elems, true);
|
||||
|
@ -647,7 +649,7 @@ static void upgradeDerivations(Globals & globals,
|
|||
} else newElems.push_back(i);
|
||||
|
||||
} catch (Error & e) {
|
||||
e.addTrace(std::nullopt, "while trying to find an upgrade for '%s'", i.queryName());
|
||||
e.addTrace(nullptr, "while trying to find an upgrade for '%s'", i.queryName());
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
@ -954,7 +956,7 @@ static void queryJSON(Globals & globals, std::vector<DrvInfo> & elems, bool prin
|
|||
} catch (AssertionError & e) {
|
||||
printMsg(lvlTalkative, "skipping derivation named '%1%' which gives an assertion failure", i.queryName());
|
||||
} catch (Error & e) {
|
||||
e.addTrace(std::nullopt, "while querying the derivation named '%1%'", i.queryName());
|
||||
e.addTrace(nullptr, "while querying the derivation named '%1%'", i.queryName());
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
@ -1015,7 +1017,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
|
|||
installedElems = queryInstalled(*globals.state, globals.profile);
|
||||
|
||||
if (source == sAvailable || compareVersions)
|
||||
loadDerivations(*globals.state, globals.instSource.nixExprPath,
|
||||
loadDerivations(*globals.state, *globals.instSource.nixExprPath,
|
||||
globals.instSource.systemFilter, *globals.instSource.autoArgs,
|
||||
attrPath, availElems);
|
||||
|
||||
|
@ -1257,7 +1259,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
|
|||
} catch (AssertionError & e) {
|
||||
printMsg(lvlTalkative, "skipping derivation named '%1%' which gives an assertion failure", i.queryName());
|
||||
} catch (Error & e) {
|
||||
e.addTrace(std::nullopt, "while querying the derivation named '%1%'", i.queryName());
|
||||
e.addTrace(nullptr, "while querying the derivation named '%1%'", i.queryName());
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
@ -1374,23 +1376,24 @@ static int main_nix_env(int argc, char * * argv)
|
|||
Operation op = 0;
|
||||
RepairFlag repair = NoRepair;
|
||||
std::string file;
|
||||
Path nixExprPath;
|
||||
|
||||
Globals globals;
|
||||
|
||||
globals.instSource.type = srcUnknown;
|
||||
globals.instSource.nixExprPath = getHome() + "/.nix-defexpr";
|
||||
nixExprPath = getHome() + "/.nix-defexpr";
|
||||
globals.instSource.systemFilter = "*";
|
||||
|
||||
if (!pathExists(globals.instSource.nixExprPath)) {
|
||||
if (!pathExists(nixExprPath)) {
|
||||
try {
|
||||
createDirs(globals.instSource.nixExprPath);
|
||||
createDirs(nixExprPath);
|
||||
replaceSymlink(
|
||||
fmt("%s/profiles/per-user/%s/channels", settings.nixStateDir, getUserName()),
|
||||
globals.instSource.nixExprPath + "/channels");
|
||||
nixExprPath + "/channels");
|
||||
if (getuid() != 0)
|
||||
replaceSymlink(
|
||||
fmt("%s/profiles/per-user/root/channels", settings.nixStateDir),
|
||||
globals.instSource.nixExprPath + "/channels_root");
|
||||
nixExprPath + "/channels_root");
|
||||
} catch (Error &) { }
|
||||
}
|
||||
|
||||
|
@ -1474,8 +1477,10 @@ static int main_nix_env(int argc, char * * argv)
|
|||
globals.state = std::shared_ptr<EvalState>(new EvalState(myArgs.searchPath, store));
|
||||
globals.state->repair = repair;
|
||||
|
||||
if (file != "")
|
||||
globals.instSource.nixExprPath = lookupFileArg(*globals.state, file);
|
||||
globals.instSource.nixExprPath = std::make_shared<SourcePath>(
|
||||
file != ""
|
||||
? lookupFileArg(*globals.state, file)
|
||||
: globals.state->rootPath(nixExprPath));
|
||||
|
||||
globals.instSource.autoArgs = myArgs.getAutoArgs(*globals.state);
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@ DrvInfos queryInstalled(EvalState & state, const Path & userEnv)
|
|||
Path manifestFile = userEnv + "/manifest.nix";
|
||||
if (pathExists(manifestFile)) {
|
||||
Value v;
|
||||
state.evalFile(manifestFile, v);
|
||||
state.evalFile(state.rootPath(manifestFile), v);
|
||||
Bindings & bindings(*state.allocBindings(0));
|
||||
getDerivations(state, v, "", bindings, elems, false);
|
||||
}
|
||||
|
@ -114,7 +114,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems,
|
|||
Value envBuilder;
|
||||
state.eval(state.parseExprFromString(
|
||||
#include "buildenv.nix.gen.hh"
|
||||
, "/"), envBuilder);
|
||||
, state.rootPath("/")), envBuilder);
|
||||
|
||||
/* Construct a Nix expression that calls the user environment
|
||||
builder with the manifest as argument. */
|
||||
|
|
|
@ -168,9 +168,11 @@ static int main_nix_instantiate(int argc, char * * argv)
|
|||
|
||||
if (findFile) {
|
||||
for (auto & i : files) {
|
||||
Path p = state->findFile(i);
|
||||
if (p == "") throw Error("unable to find '%1%'", i);
|
||||
std::cout << p << std::endl;
|
||||
auto p = state->findFile(i);
|
||||
if (auto fn = p.getPhysicalPath())
|
||||
std::cout << fn->abs() << std::endl;
|
||||
else
|
||||
throw Error("'%s' has no physical path", p);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
@ -184,8 +186,8 @@ static int main_nix_instantiate(int argc, char * * argv)
|
|||
|
||||
for (auto & i : files) {
|
||||
Expr * e = fromArgs
|
||||
? state->parseExprFromString(i, absPath("."))
|
||||
: state->parseExprFromFile(resolveExprPath(state->checkSourcePath(lookupFileArg(*state, i))));
|
||||
? state->parseExprFromString(i, state->rootPath(absPath(".")))
|
||||
: state->parseExprFromFile(resolveExprPath(lookupFileArg(*state, i)));
|
||||
processExpr(*state, attrPaths, parseOnly, strict, autoArgs,
|
||||
evalOnly, outputKind, xmlOutputSourceLocation, e);
|
||||
}
|
||||
|
|
|
@ -926,7 +926,6 @@ static void opServe(Strings opFlags, Strings opArgs)
|
|||
worker_proto::write(*store, out, status.builtOutputs);
|
||||
}
|
||||
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
|
|
|
@ -257,7 +257,7 @@ static void daemonLoop()
|
|||
} catch (Interrupted & e) {
|
||||
return;
|
||||
} catch (Error & error) {
|
||||
ErrorInfo ei = error.info();
|
||||
auto ei = error.info();
|
||||
// FIXME: add to trace?
|
||||
ei.msg = hintfmt("error processing connection: %1%", ei.msg.str());
|
||||
logError(ei);
|
||||
|
|
|
@ -559,7 +559,9 @@ struct CmdDevelop : Common, MixEnvironment
|
|||
// chdir if installable is a flake of type git+file or path
|
||||
auto installableFlake = std::dynamic_pointer_cast<InstallableFlake>(installable);
|
||||
if (installableFlake) {
|
||||
auto sourcePath = installableFlake->getLockedFlake()->flake.resolvedRef.input.getSourcePath();
|
||||
auto sourcePath = installableFlake->getLockedFlake()
|
||||
->flake.resolvedRef.input.getAccessor(store).first
|
||||
->root().getPhysicalPath();
|
||||
if (sourcePath) {
|
||||
if (chdir(sourcePath->c_str()) == -1) {
|
||||
throw SysError("chdir to '%s' failed", *sourcePath);
|
||||
|
|
|
@ -65,7 +65,7 @@ struct CmdEval : MixJSON, InstallableCommand
|
|||
|
||||
if (apply) {
|
||||
auto vApply = state->allocValue();
|
||||
state->eval(state->parseExprFromString(*apply, absPath(".")), *vApply);
|
||||
state->eval(state->parseExprFromString(*apply, state->rootPath(absPath("."))), *vApply);
|
||||
auto vRes = state->allocValue();
|
||||
state->callFunction(*vApply, *v, *vRes, noPos);
|
||||
v = vRes;
|
||||
|
|
|
@ -15,11 +15,10 @@ R""(
|
|||
# nix flake archive dwarffs
```

* Print the store paths of the flake sources of NixOps without
fetching them:
* Copy and print the store paths of the flake sources of NixOps:

```console
# nix flake archive --json --dry-run nixops
# nix flake archive --json nixops
```

# Description

@ -2,21 +2,18 @@ R""(

# Examples

* Download a tarball and unpack it:
* Download a tarball:

```console
# nix flake prefetch https://cdn.kernel.org/pub/linux/kernel/v5.x/linux-5.10.5.tar.xz
Downloaded 'https://cdn.kernel.org/pub/linux/kernel/v5.x/linux-5.10.5.tar.xz?narHash=sha256-3XYHZANT6AFBV0BqegkAZHbba6oeDkIUCDwbATLMhAY='
to '/nix/store/sl5vvk8mb4ma1sjyy03kwpvkz50hd22d-source' (hash
'sha256-3XYHZANT6AFBV0BqegkAZHbba6oeDkIUCDwbATLMhAY=').
Fetched 'https://cdn.kernel.org/pub/linux/kernel/v5.x/linux-5.10.5.tar.xz?narHash=sha256-3XYHZANT6AFBV0BqegkAZHbba6oeDkIUCDwbATLMhAY='.
```

* Download the `dwarffs` flake (looked up in the flake registry):

```console
# nix flake prefetch dwarffs --json
{"hash":"sha256-VHg3MYVgQ12LeRSU2PSoDeKlSPD8PYYEFxxwkVVDRd0="
,"storePath":"/nix/store/hang3792qwdmm2n0d9nsrs5n6bsws6kv-source"}
{}
```

# Description

@ -16,7 +16,7 @@ R""(
# Description

This command recreates the lock file of a flake (`flake.lock`), thus
updating the lock for every mutable input (like `nixpkgs`) to its
updating the lock for every unlocked input (like `nixpkgs`) to its
current version. This is equivalent to passing `--recreate-lock-file`
to any command that operates on a flake. That is,
|
Some files were not shown because too many files have changed in this diff.