Merge remote-tracking branch 'upstream/master' into add-body-to-network-errors

John Ericson 2020-07-03 17:08:39 +00:00
commit 465daa9396
90 changed files with 1375 additions and 623 deletions

.github/dependabot.yml vendored Normal file (+6)
View file

@ -0,0 +1,6 @@
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"

View file

@ -10,5 +10,5 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- uses: cachix/install-nix-action@v8
- uses: cachix/install-nix-action@v10
- run: nix-build release.nix --arg nix '{ outPath = ./.; revCount = 123; shortRev = "abcdefgh"; }' --arg systems '[ builtins.currentSystem ]' -A installerScript -A perlBindings

View file

@ -97,7 +97,7 @@ $ rm -rf /nix
installation on your system:
</para>
<screen>sh &lt;(curl https://nixos.org/nix/install) --daemon</screen>
<screen>sh &lt;(curl -L https://nixos.org/nix/install) --daemon</screen>
<para>
The multi-user installation of Nix will create build users between
@ -178,7 +178,7 @@ sudo rm /Library/LaunchDaemons/org.nixos.nix-daemon.plist
is a bit of a misnomer). To use this approach, just install Nix with:
</para>
<screen>$ sh &lt;(curl https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume</screen>
<screen>$ sh &lt;(curl -L https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume</screen>
<para>
If you don't like the sound of this, you'll want to weigh the
@ -429,7 +429,7 @@ LABEL=Nix\040Store /nix apfs rw,nobrowse
NixOS.org installation script:
<screen>
sh &lt;(curl https://nixos.org/nix/install)
sh &lt;(curl -L https://nixos.org/nix/install)
</screen>
</para>

mk/run_test.sh Executable file (+28)
View file

@ -0,0 +1,28 @@
#!/bin/sh
set -u
red=""
green=""
yellow=""
normal=""
post_run_msg="ran test $1..."
if [ -t 1 ]; then
red=""
green=""
yellow=""
normal=""
fi
(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} init.sh 2>/dev/null > /dev/null)
log="$(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} $(basename $1) 2>&1)"
status=$?
if [ $status -eq 0 ]; then
echo "$post_run_msg [${green}PASS$normal]"
elif [ $status -eq 99 ]; then
echo "$post_run_msg [${yellow}SKIP$normal]"
else
echo "$post_run_msg [${red}FAIL$normal]"
echo "$log" | sed 's/^/ /'
exit "$status"
fi

View file

@ -1,45 +1,15 @@
# Run program $1 as part of make installcheck.
test-deps =
define run-install-test
installcheck: $1
installcheck: $1.test
_installcheck-list += $1
.PHONY: $1.test
$1.test: $1 $(test-deps)
@env TEST_NAME=$(notdir $(basename $1)) TESTS_ENVIRONMENT="$(tests-environment)" mk/run_test.sh $1
endef
# Color code from https://unix.stackexchange.com/a/10065
installcheck:
@total=0; failed=0; \
red=""; \
green=""; \
yellow=""; \
normal=""; \
if [ -t 1 ]; then \
red=""; \
green=""; \
yellow=""; \
normal=""; \
fi; \
for i in $(_installcheck-list); do \
total=$$((total + 1)); \
printf "running test $$i..."; \
log="$$(cd $$(dirname $$i) && $(tests-environment) $$(basename $$i) 2>&1)"; \
status=$$?; \
if [ $$status -eq 0 ]; then \
echo " [$${green}PASS$$normal]"; \
elif [ $$status -eq 99 ]; then \
echo " [$${yellow}SKIP$$normal]"; \
else \
echo " [$${red}FAIL$$normal]"; \
echo "$$log" | sed 's/^/ /'; \
failed=$$((failed + 1)); \
fi; \
done; \
if [ "$$failed" != 0 ]; then \
echo "$${red}$$failed out of $$total tests failed $$normal"; \
exit 1; \
else \
echo "$${green}All tests succeeded$$normal"; \
fi
.PHONY: check installcheck

View file

@ -526,7 +526,7 @@ This script is going to call sudo a lot. Normally, it would show you
exactly what commands it is running and why. However, the script is
run in a headless fashion, like this:
$ curl https://nixos.org/nix/install | sh
$ curl -L https://nixos.org/nix/install | sh
or maybe in a CI pipeline. Because of that, we're going to skip the
verbose output in the interest of brevity.
@ -534,7 +534,7 @@ verbose output in the interest of brevity.
If you would like to
see the output, try like this:
$ curl -o install-nix https://nixos.org/nix/install
$ curl -L -o install-nix https://nixos.org/nix/install
$ sh ./install-nix
EOF

View file

@ -113,7 +113,7 @@ if [ "$(uname -s)" = "Darwin" ]; then
(
echo ""
echo "Installing on macOS >=10.15 requires relocating the store to an apfs volume."
echo "Use sh <(curl https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume or run the preparation steps manually."
echo "Use sh <(curl -L https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume or run the preparation steps manually."
echo "See https://nixos.org/nix/manual/#sect-macos-installation"
echo ""
) >&2

View file

@ -130,7 +130,7 @@ Pos findDerivationFilename(EvalState & state, Value & v, std::string what)
Symbol file = state.symbols.create(filename);
return { file, lineno, 0 };
return { foFile, file, lineno, 0 };
}

View file

@ -78,7 +78,7 @@ public:
if (!a)
throw Error({
.hint = hintfmt("attribute '%s' missing", name),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
return *a;
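
Most of the library hunks in this merge repeat one mechanical change: ErrorInfo no longer wraps its position in a NixCode object and instead carries errPos directly. A minimal self-contained sketch of the before/after initializer shape (Pos and the plain-string hint below are stand-ins for the real Nix types):

#include <optional>
#include <string>

struct Pos { std::string file; unsigned int line = 0, column = 0; };

struct ErrorInfo {
    std::string hint;            // the real field is a hintformat
    std::optional<Pos> errPos;   // previously: std::optional<NixCode> nixCode
};

ErrorInfo missingAttr(const std::string & name, const Pos & pos)
{
    // old: return { .hint = ..., .nixCode = NixCode { .errPos = pos } };
    return {
        .hint = "attribute '" + name + "' missing",
        .errPos = pos,
    };
}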

View file

@ -11,7 +11,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s))
{
throw EvalError({
.hint = hintfmt(s),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
@ -25,7 +25,7 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const
{
throw TypeError({
.hint = hintfmt(s, showType(v)),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}

View file

@ -529,7 +529,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const
{
throw EvalError({
.hint = hintfmt(s, s2),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
@ -542,7 +542,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const
{
throw EvalError({
.hint = hintfmt(s, s2, s3),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
@ -551,7 +551,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & p1, const char * s, const
// p1 is where the error occurred; p2 is a position mentioned in the message.
throw EvalError({
.hint = hintfmt(s, sym, p2),
.nixCode = NixCode { .errPos = p1 }
.errPos = p1
});
}
@ -559,7 +559,7 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s))
{
throw TypeError({
.hint = hintfmt(s),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
@ -572,7 +572,7 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const
{
throw TypeError({
.hint = hintfmt(s, fun.showNamePos(), s2),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
@ -580,7 +580,7 @@ LocalNoInlineNoReturn(void throwAssertionError(const Pos & pos, const char * s,
{
throw AssertionError({
.hint = hintfmt(s, s1),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
@ -588,23 +588,18 @@ LocalNoInlineNoReturn(void throwUndefinedVarError(const Pos & pos, const char *
{
throw UndefinedVarError({
.hint = hintfmt(s, s1),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
LocalNoInline(void addErrorPrefix(Error & e, const char * s, const string & s2))
LocalNoInline(void addErrorTrace(Error & e, const char * s, const string & s2))
{
e.addPrefix(format(s) % s2);
e.addTrace(std::nullopt, s, s2);
}
LocalNoInline(void addErrorPrefix(Error & e, const char * s, const ExprLambda & fun, const Pos & pos))
LocalNoInline(void addErrorTrace(Error & e, const Pos & pos, const char * s, const string & s2))
{
e.addPrefix(format(s) % fun.showNamePos() % pos);
}
LocalNoInline(void addErrorPrefix(Error & e, const char * s, const string & s2, const Pos & pos))
{
e.addPrefix(format(s) % s2 % pos);
e.addTrace(pos, s, s2);
}
@ -818,7 +813,7 @@ void EvalState::evalFile(const Path & path_, Value & v)
try {
eval(e, v);
} catch (Error & e) {
addErrorPrefix(e, "while evaluating the file '%1%':\n", path2);
addErrorTrace(e, "while evaluating the file '%1%':", path2);
throw;
}
@ -1068,8 +1063,8 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
} catch (Error & e) {
if (pos2 && pos2->file != state.sDerivationNix)
addErrorPrefix(e, "while evaluating the attribute '%1%' at %2%:\n",
showAttrPath(state, env, attrPath), *pos2);
addErrorTrace(e, *pos2, "while evaluating the attribute '%1%'",
showAttrPath(state, env, attrPath));
throw;
}
@ -1237,11 +1232,15 @@ void EvalState::callFunction(Value & fun, Value & arg, Value & v, const Pos & po
/* Evaluate the body. This is conditional on showTrace, because
catching exceptions makes this function not tail-recursive. */
if (settings.showTrace)
if (loggerSettings.showTrace.get())
try {
lambda.body->eval(*this, env2, v);
} catch (Error & e) {
addErrorPrefix(e, "while evaluating %1%, called from %2%:\n", lambda, pos);
addErrorTrace(e, lambda.pos, "while evaluating %s",
(lambda.name.set()
? "'" + (string) lambda.name + "'"
: "anonymous lambda"));
addErrorTrace(e, pos, "from call site%s", "");
throw;
}
else
@ -1516,7 +1515,7 @@ void EvalState::forceValueDeep(Value & v)
try {
recurse(*i.value);
} catch (Error & e) {
addErrorPrefix(e, "while evaluating the attribute '%1%' at %2%:\n", i.name, *i.pos);
addErrorTrace(e, *i.pos, "while evaluating the attribute '%1%'", i.name);
throw;
}
}
@ -1936,7 +1935,7 @@ string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, boo
{
throw TypeError({
.hint = hintfmt("cannot coerce %1% to a string", showType()),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
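
The eval.cc hunks above also retire the addErrorPrefix helpers, which prepended formatted strings to the message, in favour of addErrorTrace / Error::addTrace, which append structured trace entries with an optional position. A reduced sketch of that shape (stand-in types; the real addTrace is a template over format arguments):

#include <optional>
#include <string>
#include <vector>

struct Pos { std::string file; unsigned int line = 0, column = 0; };
struct Trace { std::optional<Pos> pos; std::string hint; };

struct Error {
    std::vector<Trace> traces;
    void addTrace(std::optional<Pos> pos, std::string hint)
    {
        traces.push_back({ std::move(pos), std::move(hint) });
    }
};

void annotate(Error & e, const Pos & lambdaPos, const Pos & callPos)
{
    // was: e.addPrefix(format("while evaluating %1%, called from %2%:\n") % lambda % pos);
    e.addTrace(lambdaPos, "while evaluating 'f'");
    e.addTrace(callPos, "from call site");
    e.addTrace(std::nullopt, "while evaluating the file 'example.nix'");
}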

View file

@ -250,7 +250,7 @@ private:
friend struct ExprAttrs;
friend struct ExprLet;
Expr * parse(const char * text, const Path & path,
Expr * parse(const char * text, FileOrigin origin, const Path & path,
const Path & basePath, StaticEnv & staticEnv);
public:

View file

@ -1,7 +1,7 @@
#include "get-drvs.hh"
#include "util.hh"
#include "eval-inline.hh"
#include "derivations.hh"
#include "store-api.hh"
#include <cstring>
#include <regex>

View file

@ -197,7 +197,22 @@ std::ostream & operator << (std::ostream & str, const Pos & pos)
if (!pos)
str << "undefined position";
else
str << (format(ANSI_BOLD "%1%" ANSI_NORMAL ":%2%:%3%") % (string) pos.file % pos.line % pos.column).str();
{
auto f = format(ANSI_BOLD "%1%" ANSI_NORMAL ":%2%:%3%");
switch (pos.origin) {
case foFile:
f % (string) pos.file;
break;
case foStdin:
case foString:
f % "(string)";
break;
default:
throw Error("unhandled Pos origin!");
}
str << (f % pos.line % pos.column).str();
}
return str;
}
@ -270,7 +285,7 @@ void ExprVar::bindVars(const StaticEnv & env)
if (withLevel == -1)
throw UndefinedVarError({
.hint = hintfmt("undefined variable '%1%'", name),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
fromWith = true;
this->level = withLevel;
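
The operator<< hunk above now dispatches on the new FileOrigin carried by every position. A self-contained sketch of that printing behaviour, leaving out the ANSI bold markup and the Symbol interning of the real code:

#include <iostream>
#include <string>

enum FileOrigin { foFile, foStdin, foString };

struct Pos {
    FileOrigin origin = foString;
    std::string file;
    unsigned int line = 0, column = 0;
};

std::ostream & operator << (std::ostream & str, const Pos & pos)
{
    if (pos.line == 0) return str << "undefined position";
    switch (pos.origin) {
        case foFile: str << pos.file; break;
        case foStdin:
        case foString: str << "(string)"; break;
    }
    return str << ":" << pos.line << ":" << pos.column;
}

// std::cout << Pos{foFile, "release.nix", 12, 3};  // prints release.nix:12:3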

View file

@ -24,11 +24,12 @@ MakeError(RestrictedPathError, Error);
struct Pos
{
FileOrigin origin;
Symbol file;
unsigned int line, column;
Pos() : line(0), column(0) { };
Pos(const Symbol & file, unsigned int line, unsigned int column)
: file(file), line(line), column(column) { };
Pos() : origin(foString), line(0), column(0) { };
Pos(FileOrigin origin, const Symbol & file, unsigned int line, unsigned int column)
: origin(origin), file(file), line(line), column(column) { };
operator bool() const
{
return line != 0;
@ -238,7 +239,7 @@ struct ExprLambda : Expr
if (!arg.empty() && formals && formals->argNames.find(arg) != formals->argNames.end())
throw ParseError({
.hint = hintfmt("duplicate formal function argument '%1%'", arg),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
};
void setName(Symbol & name);

View file

@ -30,7 +30,8 @@ namespace nix {
SymbolTable & symbols;
Expr * result;
Path basePath;
Symbol path;
Symbol file;
FileOrigin origin;
ErrorInfo error;
Symbol sLetBody;
ParseData(EvalState & state)
@ -65,18 +66,17 @@ namespace nix {
static void dupAttr(const AttrPath & attrPath, const Pos & pos, const Pos & prevPos)
{
throw ParseError({
.hint = hintfmt("attribute '%1%' already defined at %2%",
.hint = hintfmt("attribute '%1%' already defined at %2%",
showAttrPath(attrPath), prevPos),
.nixCode = NixCode { .errPos = pos },
.errPos = pos
});
}
static void dupAttr(Symbol attr, const Pos & pos, const Pos & prevPos)
{
throw ParseError({
.hint = hintfmt("attribute '%1%' already defined at %2%", attr, prevPos),
.nixCode = NixCode { .errPos = pos },
.errPos = pos
});
}
@ -148,7 +148,7 @@ static void addFormal(const Pos & pos, Formals * formals, const Formal & formal)
throw ParseError({
.hint = hintfmt("duplicate formal function argument '%1%'",
formal.name),
.nixCode = NixCode { .errPos = pos },
.errPos = pos
});
formals->formals.push_front(formal);
}
@ -246,7 +246,7 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<Ex
static inline Pos makeCurPos(const YYLTYPE & loc, ParseData * data)
{
return Pos(data->path, loc.first_line, loc.first_column);
return Pos(data->origin, data->file, loc.first_line, loc.first_column);
}
#define CUR_POS makeCurPos(*yylocp, data)
@ -259,7 +259,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
{
data->error = {
.hint = hintfmt(error),
.nixCode = NixCode { .errPos = makeCurPos(*loc, data) }
.errPos = makeCurPos(*loc, data)
};
}
@ -339,7 +339,7 @@ expr_function
{ if (!$2->dynamicAttrs.empty())
throw ParseError({
.hint = hintfmt("dynamic attributes not allowed in let"),
.nixCode = NixCode { .errPos = CUR_POS },
.errPos = CUR_POS
});
$$ = new ExprLet($2, $4);
}
@ -419,7 +419,7 @@ expr_simple
if (noURLLiterals)
throw ParseError({
.hint = hintfmt("URL literals are disabled"),
.nixCode = NixCode { .errPos = CUR_POS }
.errPos = CUR_POS
});
$$ = new ExprString(data->symbols.create($1));
}
@ -492,7 +492,7 @@ attrs
} else
throw ParseError({
.hint = hintfmt("dynamic attributes not allowed in inherit"),
.nixCode = NixCode { .errPos = makeCurPos(@2, data) },
.errPos = makeCurPos(@2, data)
});
}
| { $$ = new AttrPath; }
@ -569,13 +569,24 @@ formal
namespace nix {
Expr * EvalState::parse(const char * text,
Expr * EvalState::parse(const char * text, FileOrigin origin,
const Path & path, const Path & basePath, StaticEnv & staticEnv)
{
yyscan_t scanner;
ParseData data(*this);
data.origin = origin;
switch (origin) {
case foFile:
data.file = data.symbols.create(path);
break;
case foStdin:
case foString:
data.file = data.symbols.create(text);
break;
default:
assert(false);
}
data.basePath = basePath;
data.path = data.symbols.create(path);
yylex_init(&scanner);
yy_scan_string(text, scanner);
@ -625,13 +636,13 @@ Expr * EvalState::parseExprFromFile(const Path & path)
Expr * EvalState::parseExprFromFile(const Path & path, StaticEnv & staticEnv)
{
return parse(readFile(path).c_str(), path, dirOf(path), staticEnv);
return parse(readFile(path).c_str(), foFile, path, dirOf(path), staticEnv);
}
Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath, StaticEnv & staticEnv)
{
return parse(s.data(), "(string)", basePath, staticEnv);
return parse(s.data(), foString, "", basePath, staticEnv);
}
@ -644,7 +655,7 @@ Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath)
Expr * EvalState::parseStdin()
{
//Activity act(*logger, lvlTalkative, format("parsing standard input"));
return parseExprFromString(drainFD(0), absPath("."));
return parse(drainFD(0).data(), foStdin, "", absPath("."), staticBaseEnv);
}
@ -693,7 +704,7 @@ Path EvalState::findFile(SearchPath & searchPath, const string & path, const Pos
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
path),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
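
The parser hunks above thread FileOrigin through EvalState::parse and choose the recorded file symbol from it. A hedged sketch of just that selection (ParseData and Symbol handling reduced to plain strings):

#include <cassert>
#include <string>

enum FileOrigin { foFile, foStdin, foString };

// For foFile the parser records the path; for string and stdin input it
// records the text itself, and positions later print as "(string)".
std::string fileSymbolFor(FileOrigin origin,
    const std::string & path, const std::string & text)
{
    switch (origin) {
        case foFile: return path;
        case foStdin:
        case foString: return text;
    }
    assert(false);
    return "";
}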

View file

@ -96,7 +96,7 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot import '%1%', since path '%2%' is not valid", path, e.path),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
@ -177,7 +177,7 @@ void prim_importNative(EvalState & state, const Pos & pos, Value * * args, Value
.hint = hintfmt(
"cannot import '%1%', since path '%2%' is not valid",
path, e.path),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
@ -215,7 +215,7 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
if (count == 0) {
throw EvalError({
.hint = hintfmt("at least one argument to 'exec' required"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
PathSet context;
@ -230,7 +230,7 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
throw EvalError({
.hint = hintfmt("cannot execute '%1%', since path '%2%' is not valid",
program, e.path),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
@ -239,13 +239,13 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
try {
parsed = state.parseExprFromString(output, pos.file);
} catch (Error & e) {
e.addPrefix(fmt("While parsing the output from '%1%', at %2%\n", program, pos));
e.addTrace(pos, "While parsing the output from '%1%'", program);
throw;
}
try {
state.eval(parsed, v);
} catch (Error & e) {
e.addPrefix(fmt("While evaluating the output from '%1%', at %2%\n", program, pos));
e.addTrace(pos, "While evaluating the output from '%1%'", program);
throw;
}
}
@ -385,7 +385,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
if (startSet == args[0]->attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'startSet' required"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
state.forceList(*startSet->value, pos);
@ -399,7 +399,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
if (op == args[0]->attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'operator' required"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
state.forceValue(*op->value, pos);
@ -421,7 +421,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
if (key == e->attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'key' required"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
state.forceValue(*key->value, pos);
@ -471,7 +471,7 @@ static void prim_addErrorContext(EvalState & state, const Pos & pos, Value * * a
v = *args[1];
} catch (Error & e) {
PathSet context;
e.addPrefix(format("%1%\n") % state.coerceToString(pos, *args[0], context));
e.addTrace(std::nullopt, state.coerceToString(pos, *args[0], context));
throw;
}
}
@ -556,14 +556,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (attr == args[0]->attrs->end())
throw EvalError({
.hint = hintfmt("required attribute 'name' missing"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
string drvName;
Pos & posDrvName(*attr->pos);
try {
drvName = state.forceStringNoCtx(*attr->value, pos);
} catch (Error & e) {
e.addPrefix(fmt("while evaluating the derivation attribute 'name' at %1%:\n", posDrvName));
e.addTrace(posDrvName, "while evaluating the derivation attribute 'name'");
throw;
}
@ -603,7 +603,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
else
throw EvalError({
.hint = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
.nixCode = NixCode { .errPos = posDrvName }
.errPos = posDrvName
});
};
@ -613,7 +613,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (outputs.find(j) != outputs.end())
throw EvalError({
.hint = hintfmt("duplicate derivation output '%1%'", j),
.nixCode = NixCode { .errPos = posDrvName }
.errPos = posDrvName
});
/* !!! Check whether j is a valid attribute
name. */
@ -623,14 +623,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (j == "drv")
throw EvalError({
.hint = hintfmt("invalid derivation output name 'drv'" ),
.nixCode = NixCode { .errPos = posDrvName }
.errPos = posDrvName
});
outputs.insert(j);
}
if (outputs.empty())
throw EvalError({
.hint = hintfmt("derivation cannot have an empty set of outputs"),
.nixCode = NixCode { .errPos = posDrvName }
.errPos = posDrvName
});
};
@ -696,8 +696,9 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
}
} catch (Error & e) {
e.addPrefix(format("while evaluating the attribute '%1%' of the derivation '%2%' at %3%:\n")
% key % drvName % posDrvName);
e.addTrace(posDrvName,
"while evaluating the attribute '%1%' of the derivation '%2%'",
key, drvName);
throw;
}
}
@ -745,20 +746,20 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (drv.builder == "")
throw EvalError({
.hint = hintfmt("required attribute 'builder' missing"),
.nixCode = NixCode { .errPos = posDrvName }
.errPos = posDrvName
});
if (drv.platform == "")
throw EvalError({
.hint = hintfmt("required attribute 'system' missing"),
.nixCode = NixCode { .errPos = posDrvName }
.errPos = posDrvName
});
/* Check whether the derivation name is valid. */
if (isDerivation(drvName))
throw EvalError({
.hint = hintfmt("derivation names are not allowed to end in '%s'", drvExtension),
.nixCode = NixCode { .errPos = posDrvName }
.errPos = posDrvName
});
if (outputHash) {
@ -766,20 +767,20 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (outputs.size() != 1 || *(outputs.begin()) != "out")
throw Error({
.hint = hintfmt("multiple outputs are not supported in fixed-output derivations"),
.nixCode = NixCode { .errPos = posDrvName }
.errPos = posDrvName
});
HashType ht = outputHashAlgo.empty() ? htUnknown : parseHashType(outputHashAlgo);
std::optional<HashType> ht = parseHashTypeOpt(outputHashAlgo);
Hash h = newHashAllowEmpty(*outputHash, ht);
auto outPath = state.store->makeFixedOutputPath(ingestionMethod, h, drvName);
if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath);
drv.outputs.insert_or_assign("out", DerivationOutput {
std::move(outPath),
(ingestionMethod == FileIngestionMethod::Recursive ? "r:" : "")
+ printHashType(h.type),
h.to_string(Base16, false),
.path = std::move(outPath),
.hash = FixedOutputHash {
.method = ingestionMethod,
.hash = std::move(h),
},
});
}
@ -793,7 +794,10 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
for (auto & i : outputs) {
if (!jsonObject) drv.env[i] = "";
drv.outputs.insert_or_assign(i,
DerivationOutput { StorePath::dummy, "", "" });
DerivationOutput {
.path = StorePath::dummy,
.hash = std::optional<FixedOutputHash> {},
});
}
Hash h = hashDerivationModulo(*state.store, Derivation(drv), true);
@ -802,7 +806,10 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
auto outPath = state.store->makeOutputPath(i, h, drvName);
if (!jsonObject) drv.env[i] = state.store->printStorePath(outPath);
drv.outputs.insert_or_assign(i,
DerivationOutput { std::move(outPath), "", "" });
DerivationOutput {
.path = std::move(outPath),
.hash = std::optional<FixedOutputHash>(),
});
}
}
@ -874,7 +881,7 @@ static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, V
if (!state.store->isInStore(path))
throw EvalError({
.hint = hintfmt("path '%1%' is not in the Nix store", path),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
Path path2 = state.store->toStorePath(path);
if (!settings.readOnlyMode)
@ -895,7 +902,7 @@ static void prim_pathExists(EvalState & state, const Pos & pos, Value * * args,
.hint = hintfmt(
"cannot check the existence of '%1%', since path '%2%' is not valid",
path, e.path),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
@ -941,7 +948,7 @@ static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Va
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
string s = readFile(state.checkSourcePath(state.toRealPath(path, context)));
@ -972,7 +979,7 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
if (i == v2.attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'path' missing"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
PathSet context;
@ -983,7 +990,7 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
@ -999,17 +1006,17 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
string type = state.forceStringNoCtx(*args[0], pos);
HashType ht = parseHashType(type);
if (ht == htUnknown)
std::optional<HashType> ht = parseHashType(type);
if (!ht)
throw Error({
.hint = hintfmt("unknown hash type '%1%'", type),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
PathSet context; // discarded
Path p = state.coerceToPath(pos, *args[1], context);
mkString(v, hashFile(ht, state.checkSourcePath(p)).to_string(Base16, false), context);
mkString(v, hashFile(*ht, state.checkSourcePath(p)).to_string(Base16, false), context);
}
/* Read a directory (without . or ..) */
@ -1022,7 +1029,7 @@ static void prim_readDir(EvalState & state, const Pos & pos, Value * * args, Val
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
@ -1098,7 +1105,7 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
"in 'toFile': the file named '%1%' must not contain a reference "
"to a derivation but contains (%2%)",
name, path),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
refs.insert(state.store->parseStorePath(path));
}
@ -1169,7 +1176,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
if (!context.empty())
throw EvalError({
.hint = hintfmt("string '%1%' cannot refer to other paths", path),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
state.forceValue(*args[0], pos);
@ -1178,7 +1185,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
.hint = hintfmt(
"first argument in call to 'filterSource' is not a function but %1%",
showType(*args[0])),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, Hash(), v);
@ -1201,7 +1208,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
if (!context.empty())
throw EvalError({
.hint = hintfmt("string '%1%' cannot refer to other paths", path),
.nixCode = NixCode { .errPos = *attr.pos }
.errPos = *attr.pos
});
} else if (attr.name == state.sName)
name = state.forceStringNoCtx(*attr.value, *attr.pos);
@ -1215,13 +1222,13 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
else
throw EvalError({
.hint = hintfmt("unsupported argument '%1%' to 'addPath'", attr.name),
.nixCode = NixCode { .errPos = *attr.pos }
.errPos = *attr.pos
});
}
if (path.empty())
throw EvalError({
.hint = hintfmt("'path' required"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
if (name.empty())
name = baseNameOf(path);
@ -1282,7 +1289,7 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
if (i == args[1]->attrs->end())
throw EvalError({
.hint = hintfmt("attribute '%1%' missing", attr),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
// !!! add to stack trace?
if (state.countCalls && i->pos) state.attrSelects[*i->pos]++;
@ -1365,7 +1372,7 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
if (j == v2.attrs->end())
throw TypeError({
.hint = hintfmt("'name' attribute missing in a call to 'listToAttrs'"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
string name = state.forceStringNoCtx(*j->value, pos);
@ -1375,7 +1382,7 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
if (j2 == v2.attrs->end())
throw TypeError({
.hint = hintfmt("'value' attribute missing in a call to 'listToAttrs'"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
v.attrs->push_back(Attr(sym, j2->value, j2->pos));
}
@ -1451,7 +1458,7 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
if (args[0]->type != tLambda)
throw TypeError({
.hint = hintfmt("'functionArgs' requires a function"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
if (!args[0]->lambda.fun->matchAttrs) {
@ -1507,7 +1514,7 @@ static void elemAt(EvalState & state, const Pos & pos, Value & list, int n, Valu
if (n < 0 || (unsigned int) n >= list.listSize())
throw Error({
.hint = hintfmt("list index %1% is out of bounds", n),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
state.forceValue(*list.listElems()[n], pos);
v = *list.listElems()[n];
@ -1537,7 +1544,7 @@ static void prim_tail(EvalState & state, const Pos & pos, Value * * args, Value
if (args[0]->listSize() == 0)
throw Error({
.hint = hintfmt("'tail' called on an empty list"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
state.mkList(v, args[0]->listSize() - 1);
@ -1682,7 +1689,7 @@ static void prim_genList(EvalState & state, const Pos & pos, Value * * args, Val
if (len < 0)
throw EvalError({
.hint = hintfmt("cannot create list of size %1%", len),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
state.mkList(v, len);
@ -1844,7 +1851,7 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
if (f2 == 0)
throw EvalError({
.hint = hintfmt("division by zero"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
if (args[0]->type == tFloat || args[1]->type == tFloat) {
@ -1856,7 +1863,7 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
if (i1 == std::numeric_limits<NixInt>::min() && i2 == -1)
throw EvalError({
.hint = hintfmt("overflow in integer division"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
mkInt(v, i1 / i2);
@ -1917,7 +1924,7 @@ static void prim_substring(EvalState & state, const Pos & pos, Value * * args, V
if (start < 0)
throw EvalError({
.hint = hintfmt("negative start position in 'substring'"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
mkString(v, (unsigned int) start >= s.size() ? "" : string(s, start, len), context);
@ -1936,17 +1943,17 @@ static void prim_stringLength(EvalState & state, const Pos & pos, Value * * args
static void prim_hashString(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
string type = state.forceStringNoCtx(*args[0], pos);
HashType ht = parseHashType(type);
if (ht == htUnknown)
std::optional<HashType> ht = parseHashType(type);
if (!ht)
throw Error({
.hint = hintfmt("unknown hash type '%1%'", type),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
PathSet context; // discarded
string s = state.forceString(*args[1], context, pos);
mkString(v, hashString(ht, s).to_string(Base16, false), context);
mkString(v, hashString(*ht, s).to_string(Base16, false), context);
}
@ -1986,12 +1993,12 @@ void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v)
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
throw EvalError({
.hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
} else {
throw EvalError({
.hint = hintfmt("invalid regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
}
@ -2059,12 +2066,12 @@ static void prim_split(EvalState & state, const Pos & pos, Value * * args, Value
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
throw EvalError({
.hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
} else {
throw EvalError({
.hint = hintfmt("invalid regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
}
@ -2098,7 +2105,7 @@ static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * ar
if (args[0]->listSize() != args[1]->listSize())
throw EvalError({
.hint = hintfmt("'from' and 'to' arguments to 'replaceStrings' have different lengths"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
vector<string> from;
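
Several primops above (hashFile, hashString) switch from the htUnknown sentinel to an optional hash type. A self-contained sketch of the new caller pattern; the enumerators listed are assumptions, only the optional-versus-sentinel shape follows the diff:

#include <optional>
#include <stdexcept>
#include <string>

enum HashType { htMD5, htSHA1, htSHA256, htSHA512 };

std::optional<HashType> parseHashType(const std::string & s)
{
    if (s == "md5") return htMD5;
    if (s == "sha1") return htSHA1;
    if (s == "sha256") return htSHA256;
    if (s == "sha512") return htSHA512;
    return std::nullopt;   // was: return htUnknown;
}

void checkType(const std::string & type)
{
    std::optional<HashType> ht = parseHashType(type);
    if (!ht)
        throw std::runtime_error("unknown hash type '" + type + "'");
    // use *ht, e.g. hashString(*ht, s).to_string(Base16, false)
}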

View file

@ -1,6 +1,6 @@
#include "primops.hh"
#include "eval-inline.hh"
#include "derivations.hh"
#include "store-api.hh"
namespace nix {
@ -148,7 +148,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
if (!state.store->isStorePath(i.name))
throw EvalError({
.hint = hintfmt("Context key '%s' is not a store path", i.name),
.nixCode = NixCode { .errPos = *i.pos }
.errPos = *i.pos
});
if (!settings.readOnlyMode)
state.store->ensurePath(state.store->parseStorePath(i.name));
@ -165,7 +165,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
if (!isDerivation(i.name)) {
throw EvalError({
.hint = hintfmt("Tried to add all-outputs context of %s, which is not a derivation, to a string", i.name),
.nixCode = NixCode { .errPos = *i.pos }
.errPos = *i.pos
});
}
context.insert("=" + string(i.name));
@ -178,7 +178,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
if (iter->value->listSize() && !isDerivation(i.name)) {
throw EvalError({
.hint = hintfmt("Tried to add derivation output context of %s, which is not a derivation, to a string", i.name),
.nixCode = NixCode { .errPos = *i.pos }
.errPos = *i.pos
});
}
for (unsigned int n = 0; n < iter->value->listSize(); ++n) {

View file

@ -37,14 +37,14 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
else
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to 'fetchGit'", attr.name),
.nixCode = NixCode { .errPos = *attr.pos }
.errPos = *attr.pos
});
}
if (url.empty())
throw EvalError({
.hint = hintfmt("'url' argument required"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
} else

View file

@ -40,14 +40,14 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
else
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to 'fetchMercurial'", attr.name),
.nixCode = NixCode { .errPos = *attr.pos }
.errPos = *attr.pos
});
}
if (url.empty())
throw EvalError({
.hint = hintfmt("'url' argument required"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
} else

View file

@ -68,7 +68,7 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V
if (!attrs.count("type"))
throw Error({
.hint = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
input = fetchers::inputFromAttrs(attrs);
@ -112,14 +112,14 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
else
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to '%s'", attr.name, who),
.nixCode = NixCode { .errPos = *attr.pos }
.errPos = *attr.pos
});
}
if (!url)
throw EvalError({
.hint = hintfmt("'url' argument required"),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
} else
url = state.forceStringNoCtx(*args[0], pos);

View file

@ -83,7 +83,7 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va
} catch (std::runtime_error & e) {
throw EvalError({
.hint = hintfmt("while parsing a TOML string: %s", e.what()),
.nixCode = NixCode { .errPos = pos }
.errPos = pos
});
}
}

View file

@ -6,6 +6,8 @@ namespace nix::fetchers {
struct Cache
{
virtual ~Cache() { }
virtual void add(
ref<Store> store,
const Attrs & inAttrs,

View file

@ -36,7 +36,7 @@ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs)
if (res) {
if (auto narHash = maybeGetStrAttr(attrs, "narHash"))
// FIXME: require SRI hash.
res->narHash = newHashAllowEmpty(*narHash, htUnknown);
res->narHash = newHashAllowEmpty(*narHash, {});
return res;
}
}

View file

@ -70,7 +70,10 @@ DownloadFileResult downloadFile(
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
info.narHash = hashString(htSHA256, *sink.s);
info.narSize = sink.s->size();
info.ca = makeFixedOutputCA(FileIngestionMethod::Flat, hash);
info.ca = FixedOutputHash {
.method = FileIngestionMethod::Flat,
.hash = hash,
};
auto source = StringSource { *sink.s };
store->addToStore(info, source, NoRepair, NoCheckSigs);
storePath = std::move(info.path);
@ -264,7 +267,7 @@ struct TarballInputScheme : InputScheme
auto input = std::make_unique<TarballInput>(parseURL(getStrAttr(attrs, "url")));
if (auto hash = maybeGetStrAttr(attrs, "hash"))
input->hash = newHashAllowEmpty(*hash, htUnknown);
input->hash = newHashAllowEmpty(*hash, {});
return input;
}

View file

@ -26,7 +26,7 @@ Logger * makeDefaultLogger() {
case LogFormat::rawWithLogs:
return makeSimpleLogger(true);
case LogFormat::internalJson:
return makeJSONLogger(*makeSimpleLogger());
return makeJSONLogger(*makeSimpleLogger(true));
case LogFormat::bar:
return makeProgressBar();
case LogFormat::barWithLogs:

View file

@ -131,7 +131,7 @@ public:
auto state(state_.lock());
std::stringstream oss;
oss << ei;
showErrorInfo(oss, ei, loggerSettings.showTrace.get());
log(*state, ei.level, oss.str());
}

View file

@ -323,10 +323,8 @@ int handleExceptions(const string & programName, std::function<void()> fun)
printError("Try '%1% --help' for more information.", programName);
return 1;
} catch (BaseError & e) {
if (settings.showTrace && e.prefix() != "")
printError(e.prefix());
logError(e.info());
if (e.prefix() != "" && !settings.showTrace)
if (e.hasTrace() && !loggerSettings.showTrace.get())
printError("(use '--show-trace' to show detailed location information)");
return e.status;
} catch (std::bad_alloc & e) {

View file

@ -1950,8 +1950,11 @@ void linkOrCopy(const Path & from, const Path & to)
/* Hard-linking fails if we exceed the maximum link count on a
file (e.g. 32000 of ext3), which is quite possible after a
'nix-store --optimise'. FIXME: actually, why don't we just
bind-mount in this case? */
if (errno != EMLINK)
bind-mount in this case?
It can also fail with EPERM in BeegFS v7 and earlier versions
which don't allow hard-links to other directories */
if (errno != EMLINK && errno != EPERM)
throw SysError("linking '%s' to '%s'", to, from);
copyPath(from, to);
}
@ -2750,8 +2753,8 @@ struct RestrictedStore : public LocalFSStore
void queryReferrers(const StorePath & path, StorePathSet & referrers) override
{ }
StorePathSet queryDerivationOutputs(const StorePath & path) override
{ throw Error("queryDerivationOutputs"); }
OutputPathMap queryDerivationOutputMap(const StorePath & path) override
{ throw Error("queryDerivationOutputMap"); }
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
{ throw Error("queryPathFromHashPart"); }
@ -3714,14 +3717,11 @@ void DerivationGoal::registerOutputs()
/* Check that fixed-output derivations produced the right
outputs (i.e., the content hash should match the specified
hash). */
std::string ca;
std::optional<ContentAddress> ca;
if (fixedOutput) {
FileIngestionMethod outputHashMode; Hash h;
i.second.parseHashInfo(outputHashMode, h);
if (outputHashMode == FileIngestionMethod::Flat) {
if (i.second.hash->method == FileIngestionMethod::Flat) {
/* The output path should be a regular file without execute permission. */
if (!S_ISREG(st.st_mode) || (st.st_mode & S_IXUSR) != 0)
throw BuildError(
@ -3732,20 +3732,22 @@ void DerivationGoal::registerOutputs()
/* Check the hash. In hash mode, move the path produced by
the derivation to its content-addressed location. */
Hash h2 = outputHashMode == FileIngestionMethod::Recursive
? hashPath(h.type, actualPath).first
: hashFile(h.type, actualPath);
Hash h2 = i.second.hash->method == FileIngestionMethod::Recursive
? hashPath(*i.second.hash->hash.type, actualPath).first
: hashFile(*i.second.hash->hash.type, actualPath);
auto dest = worker.store.makeFixedOutputPath(outputHashMode, h2, i.second.path.name());
auto dest = worker.store.makeFixedOutputPath(i.second.hash->method, h2, i.second.path.name());
if (h != h2) {
if (i.second.hash->hash != h2) {
/* Throw an error after registering the path as
valid. */
worker.hashMismatch = true;
delayedException = std::make_exception_ptr(
BuildError("hash mismatch in fixed-output derivation '%s':\n wanted: %s\n got: %s",
worker.store.printStorePath(dest), h.to_string(SRI, true), h2.to_string(SRI, true)));
worker.store.printStorePath(dest),
i.second.hash->hash.to_string(SRI, true),
h2.to_string(SRI, true)));
Path actualDest = worker.store.Store::toRealPath(dest);
@ -3765,7 +3767,10 @@ void DerivationGoal::registerOutputs()
else
assert(worker.store.parseStorePath(path) == dest);
ca = makeFixedOutputCA(outputHashMode, h2);
ca = FixedOutputHash {
.method = i.second.hash->method,
.hash = h2,
};
}
/* Get rid of all weird permissions. This also checks that
@ -3838,7 +3843,10 @@ void DerivationGoal::registerOutputs()
info.ca = ca;
worker.store.signPathInfo(info);
if (!info.references.empty()) info.ca.clear();
if (!info.references.empty()) {
// FIXME don't we have an experimental feature for fixed output with references?
info.ca = {};
}
infos.emplace(i.first, std::move(info));
}
@ -4998,7 +5006,7 @@ bool Worker::pathContentsGood(const StorePath & path)
if (!pathExists(store.printStorePath(path)))
res = false;
else {
HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
HashResult current = hashPath(*info->narHash.type, store.printStorePath(path));
Hash nullHash(htSHA256);
res = info->narHash == nullHash || info->narHash == current.first;
}
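
The linkOrCopy hunk at the top of this build.cc diff now treats EPERM like EMLINK and falls back to copying. A reduced sketch of that control flow (copyPath is an assumed helper; the real code throws SysError rather than std::runtime_error):

#include <cerrno>
#include <stdexcept>
#include <string>
#include <unistd.h>

void copyPath(const std::string & from, const std::string & to);  // assumed helper

void linkOrCopy(const std::string & from, const std::string & to)
{
    if (link(from.c_str(), to.c_str()) == 0)
        return;
    // EMLINK: too many links (e.g. ext3's 32000 limit);
    // EPERM: BeegFS v7 and earlier refuse cross-directory hard links.
    if (errno != EMLINK && errno != EPERM)
        throw std::runtime_error("linking '" + to + "' to '" + from + "' failed");
    copyPath(from, to);
}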

View file

@ -58,14 +58,17 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
}
};
/* We always have one output, and if it's a fixed-output derivation (as
checked below) it must be the only output */
auto & output = drv.outputs.begin()->second;
/* Try the hashed mirrors first. */
if (getAttr("outputHashMode") == "flat")
if (output.hash && output.hash->method == FileIngestionMethod::Flat)
for (auto hashedMirror : settings.hashedMirrors.get())
try {
if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
auto ht = parseHashType(getAttr("outputHashAlgo"));
auto h = Hash(getAttr("outputHash"), ht);
fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
auto & h = output.hash->hash;
fetch(hashedMirror + printHashType(*h.type) + "/" + h.to_string(Base16, false));
return;
} catch (Error & e) {
debug(e.what());
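
In the fetchurl builtin above, the hashed-mirror URL is now derived from the output's FixedOutputHash rather than the outputHash* attributes. A small sketch of how that URL is assembled (Hash is reduced to its rendered parts; the mirror base comes from settings.hashedMirrors):

#include <string>

struct Hash { std::string typeName; std::string base16; };  // stand-in

std::string hashedMirrorUrl(std::string mirror, const Hash & h)
{
    if (mirror.empty() || mirror.back() != '/') mirror += '/';
    // e.g. "tarballs.nixos.org/sha256/<base-16 hash>"
    return mirror + h.typeName + "/" + h.base16;
}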

View file

@ -0,0 +1,85 @@
#include "content-address.hh"
namespace nix {
std::string FixedOutputHash::printMethodAlgo() const {
return makeFileIngestionPrefix(method) + printHashType(*hash.type);
}
std::string makeFileIngestionPrefix(const FileIngestionMethod m) {
switch (m) {
case FileIngestionMethod::Flat:
return "";
case FileIngestionMethod::Recursive:
return "r:";
default:
throw Error("impossible, caught both cases");
}
}
std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
{
return "fixed:"
+ makeFileIngestionPrefix(method)
+ hash.to_string(Base32, true);
}
// FIXME Put this somewhere?
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
std::string renderContentAddress(ContentAddress ca) {
return std::visit(overloaded {
[](TextHash th) {
return "text:" + th.hash.to_string(Base32, true);
},
[](FixedOutputHash fsh) {
return makeFixedOutputCA(fsh.method, fsh.hash);
}
}, ca);
}
ContentAddress parseContentAddress(std::string_view rawCa) {
auto prefixSeparator = rawCa.find(':');
if (prefixSeparator != string::npos) {
auto prefix = string(rawCa, 0, prefixSeparator);
if (prefix == "text") {
auto hashTypeAndHash = rawCa.substr(prefixSeparator+1, string::npos);
Hash hash = Hash(string(hashTypeAndHash));
if (*hash.type != htSHA256) {
throw Error("parseContentAddress: the text hash should have type SHA256");
}
return TextHash { hash };
} else if (prefix == "fixed") {
// This has to be an inverse of makeFixedOutputCA
auto methodAndHash = rawCa.substr(prefixSeparator+1, string::npos);
if (methodAndHash.substr(0,2) == "r:") {
std::string_view hashRaw = methodAndHash.substr(2,string::npos);
return FixedOutputHash {
.method = FileIngestionMethod::Recursive,
.hash = Hash(string(hashRaw)),
};
} else {
std::string_view hashRaw = methodAndHash;
return FixedOutputHash {
.method = FileIngestionMethod::Flat,
.hash = Hash(string(hashRaw)),
};
}
} else {
throw Error("parseContentAddress: format not recognized; has to be text or fixed");
}
} else {
throw Error("Not a content address because it lacks an appropriate prefix");
}
};
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt) {
return rawCaOpt == "" ? std::optional<ContentAddress> {} : parseContentAddress(rawCaOpt);
};
std::string renderContentAddress(std::optional<ContentAddress> ca) {
return ca ? renderContentAddress(*ca) : "";
}
}

View file

@ -0,0 +1,56 @@
#pragma once
#include <variant>
#include "hash.hh"
namespace nix {
enum struct FileIngestionMethod : uint8_t {
Flat = false,
Recursive = true
};
struct TextHash {
Hash hash;
};
/// Pair of a hash, and how the file system was ingested
struct FixedOutputHash {
FileIngestionMethod method;
Hash hash;
std::string printMethodAlgo() const;
};
/*
We've accumulated several types of content-addressed paths over the years;
fixed-output derivations support multiple hash algorithms and serialisation
methods (flat file vs NAR). Thus, ca has one of the following forms:
* text:sha256:<sha256 hash of file contents>: For paths
computed by makeTextPath() / addTextToStore().
* fixed:<r?>:<ht>:<h>: For paths computed by
makeFixedOutputPath() / addToStore().
*/
typedef std::variant<
TextHash, // for paths computed by makeTextPath() / addTextToStore
FixedOutputHash // for path computed by makeFixedOutputPath
> ContentAddress;
/* Compute the prefix to the hash algorithm which indicates how the files were
ingested. */
std::string makeFileIngestionPrefix(const FileIngestionMethod m);
/* Compute the content-addressability assertion (ValidPathInfo::ca)
for paths created by makeFixedOutputPath() / addToStore(). */
std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);
std::string renderContentAddress(ContentAddress ca);
std::string renderContentAddress(std::optional<ContentAddress> ca);
ContentAddress parseContentAddress(std::string_view rawCa);
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt);
}
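
A hedged usage sketch of the API declared above, assuming it is compiled together with the content-address.cc shown earlier and Nix's hash.hh; the input string fed to hashString is only an example:

#include <cassert>
#include <string>
#include <variant>
#include "content-address.hh"   // the header introduced above

void demo()
{
    using namespace nix;
    ContentAddress ca = FixedOutputHash {
        .method = FileIngestionMethod::Recursive,
        .hash = hashString(htSHA256, "example contents"),
    };
    std::string rendered = renderContentAddress(ca);      // "fixed:r:sha256:<base-32 hash>"
    ContentAddress back = parseContentAddress(rendered);  // the declared inverse
    assert(std::get<FixedOutputHash>(back).method == FileIngestionMethod::Recursive);
}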

View file

@ -78,10 +78,10 @@ struct TunnelLogger : public Logger
if (ei.level > verbosity) return;
std::stringstream oss;
oss << ei;
showErrorInfo(oss, ei, false);
StringSink buf;
buf << STDERR_NEXT << oss.str() << "\n"; // (fs.s + "\n");
buf << STDERR_NEXT << oss.str() << "\n";
enqueueMsg(*buf.s);
}
@ -347,6 +347,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
case wopQueryDerivationOutputMap: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
OutputPathMap outputs = store->queryDerivationOutputMap(path);
logger->stopWork();
writeOutputPathMap(*store, to, outputs);
break;
}
case wopQueryDeriver: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
@ -652,7 +661,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
to << info->ultimate
<< info->sigs
<< info->ca;
<< renderContentAddress(info->ca);
}
} else {
assert(GET_PROTOCOL_MINOR(clientVersion) >= 17);
@ -710,7 +719,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
info.references = readStorePaths<StorePathSet>(*store, from);
from >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(from);
from >> info.ca >> repair >> dontCheckSigs;
info.ca = parseContentAddressOpt(readString(from));
from >> repair >> dontCheckSigs;
if (!trusted && dontCheckSigs)
dontCheckSigs = false;
if (!trusted)

View file

@ -8,25 +8,6 @@
namespace nix {
void DerivationOutput::parseHashInfo(FileIngestionMethod & recursive, Hash & hash) const
{
recursive = FileIngestionMethod::Flat;
string algo = hashAlgo;
if (string(algo, 0, 2) == "r:") {
recursive = FileIngestionMethod::Recursive;
algo = string(algo, 2);
}
HashType hashType = parseHashType(algo);
if (hashType == htUnknown)
throw Error("unknown hash algorithm '%s'", algo);
hash = Hash(this->hash, hashType);
}
const StorePath & BasicDerivation::findOutput(const string & id) const
{
auto i = outputs.find(id);
@ -120,6 +101,34 @@ static StringSet parseStrings(std::istream & str, bool arePaths)
}
static DerivationOutput parseDerivationOutput(const Store & store, istringstream_nocopy & str)
{
expect(str, ","); auto path = store.parseStorePath(parsePath(str));
expect(str, ","); auto hashAlgo = parseString(str);
expect(str, ","); const auto hash = parseString(str);
expect(str, ")");
std::optional<FixedOutputHash> fsh;
if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat;
if (string(hashAlgo, 0, 2) == "r:") {
method = FileIngestionMethod::Recursive;
hashAlgo = string(hashAlgo, 2);
}
const HashType hashType = parseHashType(hashAlgo);
fsh = FixedOutputHash {
.method = std::move(method),
.hash = Hash(hash, hashType),
};
}
return DerivationOutput {
.path = std::move(path),
.hash = std::move(fsh),
};
}
static Derivation parseDerivation(const Store & store, const string & s)
{
Derivation drv;
@ -129,15 +138,8 @@ static Derivation parseDerivation(const Store & store, const string & s)
/* Parse the list of outputs. */
while (!endOfList(str)) {
expect(str, "("); std::string id = parseString(str);
expect(str, ","); auto path = store.parseStorePath(parsePath(str));
expect(str, ","); auto hashAlgo = parseString(str);
expect(str, ","); auto hash = parseString(str);
expect(str, ")");
drv.outputs.emplace(id, DerivationOutput {
.path = std::move(path),
.hashAlgo = std::move(hashAlgo),
.hash = std::move(hash)
});
auto output = parseDerivationOutput(store, str);
drv.outputs.emplace(std::move(id), std::move(output));
}
/* Parse the list of input derivations. */
@ -263,8 +265,9 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
if (first) first = false; else s += ',';
s += '('; printUnquotedString(s, i.first);
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(i.second.path));
s += ','; printUnquotedString(s, i.second.hashAlgo);
s += ','; printUnquotedString(s, i.second.hash);
s += ','; printUnquotedString(s, i.second.hash ? i.second.hash->printMethodAlgo() : "");
s += ','; printUnquotedString(s,
i.second.hash ? i.second.hash->hash.to_string(Base16, false) : "");
s += ')';
}
@ -320,7 +323,7 @@ bool BasicDerivation::isFixedOutput() const
{
return outputs.size() == 1 &&
outputs.begin()->first == "out" &&
outputs.begin()->second.hash != "";
outputs.begin()->second.hash;
}
@ -353,8 +356,8 @@ Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutput
if (drv.isFixedOutput()) {
DerivationOutputs::const_iterator i = drv.outputs.begin();
return hashString(htSHA256, "fixed:out:"
+ i->second.hashAlgo + ":"
+ i->second.hash + ":"
+ i->second.hash->printMethodAlgo() + ":"
+ i->second.hash->hash.to_string(Base16, false) + ":"
+ store.printStorePath(i->second.path));
}
@ -397,6 +400,31 @@ StorePathSet BasicDerivation::outputPaths() const
return paths;
}
static DerivationOutput readDerivationOutput(Source & in, const Store & store)
{
auto path = store.parseStorePath(readString(in));
auto hashAlgo = readString(in);
auto hash = readString(in);
std::optional<FixedOutputHash> fsh;
if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat;
if (string(hashAlgo, 0, 2) == "r:") {
method = FileIngestionMethod::Recursive;
hashAlgo = string(hashAlgo, 2);
}
auto hashType = parseHashType(hashAlgo);
fsh = FixedOutputHash {
.method = std::move(method),
.hash = Hash(hash, hashType),
};
}
return DerivationOutput {
.path = std::move(path),
.hash = std::move(fsh),
};
}
StringSet BasicDerivation::outputNames() const
{
@ -413,14 +441,8 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv)
auto nr = readNum<size_t>(in);
for (size_t n = 0; n < nr; n++) {
auto name = readString(in);
auto path = store.parseStorePath(readString(in));
auto hashAlgo = readString(in);
auto hash = readString(in);
drv.outputs.emplace(name, DerivationOutput {
.path = std::move(path),
.hashAlgo = std::move(hashAlgo),
.hash = std::move(hash)
});
auto output = readDerivationOutput(in, store);
drv.outputs.emplace(std::move(name), std::move(output));
}
drv.inputSrcs = readStorePaths<StorePathSet>(store, in);
@ -441,8 +463,16 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv)
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
{
out << drv.outputs.size();
for (auto & i : drv.outputs)
out << i.first << store.printStorePath(i.second.path) << i.second.hashAlgo << i.second.hash;
for (auto & i : drv.outputs) {
out << i.first
<< store.printStorePath(i.second.path);
if (i.second.hash) {
out << i.second.hash->printMethodAlgo()
<< i.second.hash->hash.to_string(Base16, false);
} else {
out << "" << "";
}
}
writeStorePaths(store, out, drv.inputSrcs);
out << drv.platform << drv.builder << drv.args;
out << drv.env.size();
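
parseDerivationOutput and readDerivationOutput above share the same decoding of the legacy (hashAlgo, hash) string pair into the new optional FixedOutputHash. A self-contained sketch of that decoding, with the hash kept as a raw string:

#include <cstdint>
#include <optional>
#include <string>

enum class FileIngestionMethod : uint8_t { Flat = false, Recursive = true };

struct FixedOutputHash {
    FileIngestionMethod method;
    std::string hashAlgo;   // stands in for HashType
    std::string hash;       // stands in for Hash
};

std::optional<FixedOutputHash> decodeOutputHash(std::string hashAlgo, std::string hash)
{
    if (hashAlgo.empty())
        return std::nullopt;                     // ordinary, input-addressed output
    auto method = FileIngestionMethod::Flat;
    if (hashAlgo.compare(0, 2, "r:") == 0) {
        method = FileIngestionMethod::Recursive; // "r:" selects NAR (recursive) ingestion
        hashAlgo = hashAlgo.substr(2);
    }
    return FixedOutputHash { method, std::move(hashAlgo), std::move(hash) };
}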

View file

@ -1,8 +1,9 @@
#pragma once
#include "path.hh"
#include "types.hh"
#include "hash.hh"
#include "store-api.hh"
#include "content-address.hh"
#include <map>
@ -15,9 +16,7 @@ namespace nix {
struct DerivationOutput
{
StorePath path;
std::string hashAlgo; /* hash used for expected hash computation */
std::string hash; /* expected hash, may be null */
void parseHashInfo(FileIngestionMethod & recursive, Hash & hash) const;
std::optional<FixedOutputHash> hash; /* hash used for expected hash computation */
};
typedef std::map<string, DerivationOutput> DerivationOutputs;
@ -70,6 +69,7 @@ struct Derivation : BasicDerivation
class Store;
enum RepairFlag : bool { NoRepair = false, Repair = true };
/* Write a derivation to the Nix store, and return its path. */
StorePath writeDerivation(ref<Store> store,
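
With this header change, whether an output is fixed-output becomes a presence test on the optional hash rather than a comparison against the empty string. A hedged sketch (StorePath and Hash are stand-ins):

#include <cstdint>
#include <map>
#include <optional>
#include <string>

enum class FileIngestionMethod : uint8_t { Flat = false, Recursive = true };
struct Hash { std::string rendered; };
struct StorePath { std::string baseName; };
struct FixedOutputHash { FileIngestionMethod method; Hash hash; };

struct DerivationOutput {
    StorePath path;
    std::optional<FixedOutputHash> hash;   // empty for ordinary (input-addressed) outputs
};
using DerivationOutputs = std::map<std::string, DerivationOutput>;

// The isFixedOutput() change in derivations.cc reduces to:
bool isFixedOutput(const DerivationOutputs & outputs)
{
    return outputs.size() == 1
        && outputs.begin()->first == "out"
        && outputs.begin()->second.hash.has_value();   // was: hash != ""
}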

View file

@ -55,7 +55,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
filesystem corruption from spreading to other machines.
Don't complain if the stored hash is zero (unknown). */
Hash hash = hashAndWriteSink.currentHash();
if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
if (hash != info->narHash && info->narHash != Hash(*info->narHash.type))
throw Error("hash of path '%s' has changed from '%s' to '%s'!",
printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));

View file

@ -35,7 +35,7 @@ Settings::Settings()
, nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR)))
, nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR)))
, nixManDir(canonPath(NIX_MAN_DIR))
, nixDaemonSocketFile(canonPath(nixStateDir + DEFAULT_SOCKET_PATH))
, nixDaemonSocketFile(canonPath(getEnv("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH)))
{
buildUsersGroup = getuid() == 0 ? "nixbld" : "";
lockCPU = getEnv("NIX_AFFINITY_HACK") == "1";

View file

@ -196,10 +196,6 @@ public:
/* Whether to lock the Nix client and worker to the same CPU. */
bool lockCPU;
/* Whether to show a stack trace if Nix evaluation fails. */
Setting<bool> showTrace{this, false, "show-trace",
"Whether to show a stack trace on evaluation errors."};
Setting<SandboxMode> sandboxMode{this,
#if __linux__
smEnabled

View file

@ -114,7 +114,7 @@ struct LegacySSHStore : public Store
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
auto s = readString(conn->from);
info->narHash = s.empty() ? Hash() : Hash(s);
conn->from >> info->ca;
info->ca = parseContentAddressOpt(readString(conn->from));
info->sigs = readStrings<StringSet>(conn->from);
}
@ -146,7 +146,7 @@ struct LegacySSHStore : public Store
<< info.narSize
<< info.ultimate
<< info.sigs
<< info.ca;
<< renderContentAddress(info.ca);
try {
copyNAR(source, conn->to);
} catch (...) {

View file

@ -561,10 +561,12 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
if (out == drv.outputs.end())
throw Error("derivation '%s' does not have an output named 'out'", printStorePath(drvPath));
FileIngestionMethod method; Hash h;
out->second.parseHashInfo(method, h);
check(makeFixedOutputPath(method, h, drvName), out->second.path, "out");
check(
makeFixedOutputPath(
out->second.hash->method,
out->second.hash->hash,
drvName),
out->second.path, "out");
}
else {
@ -578,7 +580,7 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
uint64_t LocalStore::addValidPath(State & state,
const ValidPathInfo & info, bool checkOutputs)
{
if (info.ca != "" && !info.isContentAddressed(*this))
if (info.ca.has_value() && !info.isContentAddressed(*this))
throw Error("cannot add path '%s' to the Nix store because it claims to be content-addressed but isn't",
printStorePath(info.path));
@ -590,7 +592,7 @@ uint64_t LocalStore::addValidPath(State & state,
(info.narSize, info.narSize != 0)
(info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty())
(info.ca, !info.ca.empty())
(renderContentAddress(info.ca), (bool) info.ca)
.exec();
uint64_t id = sqlite3_last_insert_rowid(state.db);
@ -664,7 +666,7 @@ void LocalStore::queryPathInfoUncached(const StorePath & path,
if (s) info->sigs = tokenizeString<StringSet>(s, " ");
s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 7);
if (s) info->ca = s;
if (s) info->ca = parseContentAddressOpt(s);
/* Get the references. */
auto useQueryReferences(state->stmtQueryReferences.use()(info->id));
@ -687,7 +689,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
(info.narHash.to_string(Base16, true))
(info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty())
(info.ca, !info.ca.empty())
(renderContentAddress(info.ca), (bool) info.ca)
(printStorePath(info.path))
.exec();
}
@ -772,17 +774,20 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path)
}
StorePathSet LocalStore::queryDerivationOutputs(const StorePath & path)
OutputPathMap LocalStore::queryDerivationOutputMap(const StorePath & path)
{
return retrySQLite<StorePathSet>([&]() {
return retrySQLite<OutputPathMap>([&]() {
auto state(_state.lock());
auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use()
(queryValidPathId(*state, path)));
StorePathSet outputs;
OutputPathMap outputs;
while (useQueryDerivationOutputs.next())
outputs.insert(parseStorePath(useQueryDerivationOutputs.getStr(1)));
outputs.emplace(
useQueryDerivationOutputs.getStr(0),
parseStorePath(useQueryDerivationOutputs.getStr(1))
);
return outputs;
});
@ -983,15 +988,15 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
deletePath(realPath);
if (info.ca != "" &&
!((hasPrefix(info.ca, "text:") && !info.references.count(info.path))
|| info.references.empty()))
// text hashing has long been allowed to have non-self-references because it is used for drv files.
bool refersToSelf = info.references.count(info.path) > 0;
if (info.ca.has_value() && !info.references.empty() && !(std::holds_alternative<TextHash>(*info.ca) && !refersToSelf))
settings.requireExperimentalFeature("ca-references");
/* While restoring the path from the NAR, compute the hash
of the NAR. */
std::unique_ptr<AbstractHashSink> hashSink;
if (info.ca == "" || !info.references.count(info.path))
if (!info.ca.has_value() || !info.references.count(info.path))
hashSink = std::make_unique<HashSink>(htSHA256);
else
hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));
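For context, the new condition above can be read as a small predicate: the ca-references experimental feature is only required when a content-addressed path has references, unless it is text-hashed and does not refer to itself (the drv-file case mentioned in the comment). A standalone restatement with a hypothetical helper name and simplified stand-in types, not code from this commit:

#include <cassert>
#include <optional>
#include <set>
#include <string>
#include <variant>

// Simplified stand-ins for the real Nix types.
struct TextHash { std::string hash; };
struct FixedOutputHash { std::string hash; };
using ContentAddress = std::variant<TextHash, FixedOutputHash>;

// Hypothetical helper restating the check in LocalStore::addToStore.
bool needsCaReferencesFeature(
    const std::optional<ContentAddress> & ca,
    const std::set<std::string> & references,
    const std::string & path)
{
    bool refersToSelf = references.count(path) > 0;
    return ca.has_value()
        && !references.empty()
        && !(std::holds_alternative<TextHash>(*ca) && !refersToSelf);
}

int main() {
    std::optional<ContentAddress> text = TextHash{"sha256:..."};
    // text-hashed path with only non-self references: no feature needed
    assert(!needsCaReferencesFeature(text, {"/nix/store/...-dep"}, "/nix/store/...-self"));
    // text-hashed path that refers to itself: feature required
    assert(needsCaReferencesFeature(text, {"/nix/store/...-self"}, "/nix/store/...-self"));
}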
@ -1077,7 +1082,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
ValidPathInfo info(dstPath);
info.narHash = hash.first;
info.narSize = hash.second;
info.ca = makeFixedOutputCA(method, h);
info.ca = FixedOutputHash { .method = method, .hash = h };
registerValidPath(info);
}
@ -1141,7 +1146,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
info.narHash = narHash;
info.narSize = sink.s->size();
info.references = references;
info.ca = "text:" + hash.to_string(Base32, true);
info.ca = TextHash { .hash = hash };
registerValidPath(info);
}
@ -1252,10 +1257,10 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i));
std::unique_ptr<AbstractHashSink> hashSink;
if (info->ca == "" || !info->references.count(info->path))
hashSink = std::make_unique<HashSink>(info->narHash.type);
if (!info->ca || !info->references.count(info->path))
hashSink = std::make_unique<HashSink>(*info->narHash.type);
else
hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
dumpPath(Store::toRealPath(i), *hashSink);
auto current = hashSink->finish();

View file

@ -133,7 +133,7 @@ public:
StorePathSet queryValidDerivers(const StorePath & path) override;
StorePathSet queryDerivationOutputs(const StorePath & path) override;
OutputPathMap queryDerivationOutputMap(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;

View file

@ -203,7 +203,7 @@ public:
narInfo->deriver = StorePath(queryNAR.getStr(9));
for (auto & sig : tokenizeString<Strings>(queryNAR.getStr(10), " "))
narInfo->sigs.insert(sig);
narInfo->ca = queryNAR.getStr(11);
narInfo->ca = parseContentAddressOpt(queryNAR.getStr(11));
return {oValid, narInfo};
});
@ -237,7 +237,7 @@ public:
(concatStringsSep(" ", info->shortRefs()))
(info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)
(concatStringsSep(" ", info->sigs))
(info->ca)
(renderContentAddress(info->ca))
(time(0)).exec();
} else {

View file

@ -67,8 +67,9 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
else if (name == "Sig")
sigs.insert(value);
else if (name == "CA") {
if (!ca.empty()) corrupt();
ca = value;
if (ca) corrupt();
// FIXME: allow blank ca or require skipping field?
ca = parseContentAddressOpt(value);
}
pos = eol + 1;
@ -104,8 +105,8 @@ std::string NarInfo::to_string(const Store & store) const
for (auto sig : sigs)
res += "Sig: " + sig + "\n";
if (!ca.empty())
res += "CA: " + ca + "\n";
if (ca)
res += "CA: " + renderContentAddress(*ca) + "\n";
return res;
}

View file

@ -1,4 +1,4 @@
#include "derivations.hh"
#include "store-api.hh"
#include <nlohmann/json_fwd.hpp>

View file

@ -1,5 +1,6 @@
#pragma once
#include "content-address.hh"
#include "types.hh"
namespace nix {
@ -61,15 +62,11 @@ public:
typedef std::set<StorePath> StorePathSet;
typedef std::vector<StorePath> StorePaths;
typedef std::map<string, StorePath> OutputPathMap;
/* Extension of derivations in the Nix store. */
const std::string drvExtension = ".drv";
enum struct FileIngestionMethod : uint8_t {
Flat = false,
Recursive = true
};
struct StorePathWithOutputs
{
StorePath path;

View file

@ -8,6 +8,7 @@
#include "derivations.hh"
#include "pool.hh"
#include "finally.hh"
#include "logging.hh"
#include <sys/types.h>
#include <sys/stat.h>
@ -38,6 +39,29 @@ void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths
out << store.printStorePath(i);
}
std::map<string, StorePath> readOutputPathMap(const Store & store, Source & from)
{
std::map<string, StorePath> pathMap;
auto rawInput = readStrings<Strings>(from);
if (rawInput.size() % 2)
throw Error("got an odd number of elements from the daemon when trying to read a output path map");
auto curInput = rawInput.begin();
while (curInput != rawInput.end()) {
auto thisKey = *curInput++;
auto thisValue = *curInput++;
pathMap.emplace(thisKey, store.parseStorePath(thisValue));
}
return pathMap;
}
void writeOutputPathMap(const Store & store, Sink & out, const std::map<string, StorePath> & pathMap)
{
out << 2*pathMap.size();
for (auto & i : pathMap) {
out << i.first;
out << store.printStorePath(i.second);
}
}
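For context, the new helpers flatten the name-to-path map into a plain list of strings on the wire: the count 2*N goes first, then alternating outputName, outputPath entries, and the reader rebuilds the map, rejecting odd-length payloads. A standalone sketch of the same layout, using a std::vector<std::string> in place of the real Sink/Source framing:

#include <cstddef>
#include <map>
#include <stdexcept>
#include <string>
#include <vector>

// Encoder: alternating name then path (the 2*N count itself is written
// separately by the framing layer, as in writeOutputPathMap above).
std::vector<std::string> encodeOutputPathMap(const std::map<std::string, std::string> & m) {
    std::vector<std::string> wire;
    for (const auto & [name, path] : m) {
        wire.push_back(name);
        wire.push_back(path);
    }
    return wire;
}

// Decoder: pair the strings back up, as in readOutputPathMap above.
std::map<std::string, std::string> decodeOutputPathMap(const std::vector<std::string> & wire) {
    if (wire.size() % 2)
        throw std::runtime_error("got an odd number of elements when reading an output path map");
    std::map<std::string, std::string> m;
    for (std::size_t i = 0; i < wire.size(); i += 2)
        m.emplace(wire[i], wire[i + 1]);
    return m;
}

int main() {
    auto wire = encodeOutputPathMap({{"out", "/nix/store/...-hello"}});
    return decodeOutputPathMap(wire).size() == 1 ? 0 : 1;
}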
/* TODO: Separate these store impls into different files, give them better names */
RemoteStore::RemoteStore(const Params & params)
@ -197,7 +221,7 @@ void RemoteStore::setOptions(Connection & conn)
overrides.erase(settings.maxSilentTime.name);
overrides.erase(settings.buildCores.name);
overrides.erase(settings.useSubstitutes.name);
overrides.erase(settings.showTrace.name);
overrides.erase(loggerSettings.showTrace.name);
conn.to << overrides.size();
for (auto & i : overrides)
conn.to << i.first << i.second.value;
@ -381,7 +405,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
conn->from >> info->ultimate;
info->sigs = readStrings<StringSet>(conn->from);
conn->from >> info->ca;
info->ca = parseContentAddressOpt(readString(conn->from));
}
}
callback(std::move(info));
@ -412,12 +436,24 @@ StorePathSet RemoteStore::queryValidDerivers(const StorePath & path)
StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path)
{
auto conn(getConnection());
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 0x16) {
return Store::queryDerivationOutputs(path);
}
conn->to << wopQueryDerivationOutputs << printStorePath(path);
conn.processStderr();
return readStorePaths<StorePathSet>(*this, conn->from);
}
OutputPathMap RemoteStore::queryDerivationOutputMap(const StorePath & path)
{
auto conn(getConnection());
conn->to << wopQueryDerivationOutputMap << printStorePath(path);
conn.processStderr();
return readOutputPathMap(*this, conn->from);
}
std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string & hashPart)
{
auto conn(getConnection());
@ -465,7 +501,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
<< info.narHash.to_string(Base16, false);
writeStorePaths(*this, conn->to, info.references);
conn->to << info.registrationTime << info.narSize
<< info.ultimate << info.sigs << info.ca
<< info.ultimate << info.sigs << renderContentAddress(info.ca)
<< repair << !checkSigs;
bool tunnel = GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21;
if (!tunnel) copyNAR(source, conn->to);

View file

@ -51,6 +51,7 @@ public:
StorePathSet queryDerivationOutputs(const StorePath & path) override;
OutputPathMap queryDerivationOutputMap(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
StorePathSet querySubstitutablePaths(const StorePathSet & paths) override;

View file

@ -173,20 +173,20 @@ static std::string makeType(
StorePath Store::makeFixedOutputPath(
FileIngestionMethod recursive,
FileIngestionMethod method,
const Hash & hash,
std::string_view name,
const StorePathSet & references,
bool hasSelfReference) const
{
if (hash.type == htSHA256 && recursive == FileIngestionMethod::Recursive) {
if (hash.type == htSHA256 && method == FileIngestionMethod::Recursive) {
return makeStorePath(makeType(*this, "source", references, hasSelfReference), hash, name);
} else {
assert(references.empty());
return makeStorePath("output:out",
hashString(htSHA256,
"fixed:out:"
+ (recursive == FileIngestionMethod::Recursive ? (string) "r:" : "")
+ makeFileIngestionPrefix(method)
+ hash.to_string(Base16, true) + ":"),
name);
}
@ -242,6 +242,16 @@ bool Store::PathInfoCacheValue::isKnownNow()
return std::chrono::steady_clock::now() < time_point + ttl;
}
StorePathSet Store::queryDerivationOutputs(const StorePath & path)
{
auto outputMap = this->queryDerivationOutputMap(path);
StorePathSet outputPaths;
for (auto & i: outputMap) {
outputPaths.emplace(std::move(i.second));
}
return outputPaths;
}
bool Store::isValidPath(const StorePath & storePath)
{
std::string hashPart(storePath.hashPart());
@ -471,8 +481,8 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
jsonRefs.elem(printStorePath(ref));
}
if (info->ca != "")
jsonPath.attr("ca", info->ca);
if (info->ca)
jsonPath.attr("ca", renderContentAddress(info->ca));
std::pair<uint64_t, uint64_t> closureSizes;
@ -757,41 +767,35 @@ void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey)
sigs.insert(secretKey.signDetached(fingerprint(store)));
}
// FIXME Put this somewhere?
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
bool ValidPathInfo::isContentAddressed(const Store & store) const
{
auto warn = [&]() {
logWarning(
ErrorInfo{
.name = "Path not content-addressed",
.hint = hintfmt("path '%s' claims to be content-addressed but isn't", store.printStorePath(path))
});
};
if (! ca) return false;
if (hasPrefix(ca, "text:")) {
Hash hash(ca.substr(5));
if (store.makeTextPath(path.name(), hash, references) == path)
return true;
else
warn();
}
else if (hasPrefix(ca, "fixed:")) {
FileIngestionMethod recursive { ca.compare(6, 2, "r:") == 0 };
Hash hash(ca.substr(recursive == FileIngestionMethod::Recursive ? 8 : 6));
auto refs = references;
bool hasSelfReference = false;
if (refs.count(path)) {
hasSelfReference = true;
refs.erase(path);
auto caPath = std::visit(overloaded {
[&](TextHash th) {
return store.makeTextPath(path.name(), th.hash, references);
},
[&](FixedOutputHash fsh) {
auto refs = references;
bool hasSelfReference = false;
if (refs.count(path)) {
hasSelfReference = true;
refs.erase(path);
}
return store.makeFixedOutputPath(fsh.method, fsh.hash, path.name(), refs, hasSelfReference);
}
if (store.makeFixedOutputPath(recursive, hash, path.name(), refs, hasSelfReference) == path)
return true;
else
warn();
}
}, *ca);
return false;
bool res = caPath == path;
if (!res)
printError("warning: path '%s' claims to be content-addressed but isn't", store.printStorePath(path));
return res;
}
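For context, `overloaded` is the usual C++17 helper for visiting a std::variant with one lambda per alternative; std::visit dispatches to the matching one. A minimal standalone example with simplified stand-ins for the two content-address alternatives; the rendering here mirrors the string format of the removed makeFixedOutputCA below (the real renderContentAddress lives in content-address.cc, which is not shown in this diff):

#include <iostream>
#include <string>
#include <variant>

template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

// Simplified stand-ins for the real Nix types.
struct TextHash { std::string hash; };
struct FixedOutputHash { std::string hash; bool recursive; };
using ContentAddress = std::variant<TextHash, FixedOutputHash>;

int main() {
    ContentAddress ca = FixedOutputHash{"sha256:...", true};
    std::string rendered = std::visit(overloaded {
        [](const TextHash & th) { return "text:" + th.hash; },
        [](const FixedOutputHash & fo) {
            return "fixed:" + std::string(fo.recursive ? "r:" : "") + fo.hash;
        },
    }, ca);
    std::cout << rendered << "\n";   // prints: fixed:r:sha256:...
}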
@ -822,14 +826,6 @@ Strings ValidPathInfo::shortRefs() const
}
std::string makeFixedOutputCA(FileIngestionMethod recursive, const Hash & hash)
{
return "fixed:"
+ (recursive == FileIngestionMethod::Recursive ? (std::string) "r:" : "")
+ hash.to_string(Base32, true);
}
}

View file

@ -2,12 +2,14 @@
#include "path.hh"
#include "hash.hh"
#include "content-address.hh"
#include "serialise.hh"
#include "crypto.hh"
#include "lru-cache.hh"
#include "sync.hh"
#include "globals.hh"
#include "config.hh"
#include "derivations.hh"
#include <atomic>
#include <limits>
@ -17,6 +19,7 @@
#include <memory>
#include <string>
#include <chrono>
#include <variant>
namespace nix {
@ -31,15 +34,12 @@ MakeError(SubstituterDisabled, Error);
MakeError(NotInStore, Error);
struct BasicDerivation;
struct Derivation;
class FSAccessor;
class NarInfoDiskCache;
class Store;
class JSONPlaceholder;
enum RepairFlag : bool { NoRepair = false, Repair = true };
enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true };
enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true };
enum AllowInvalidFlag : bool { DisallowInvalid = false, AllowInvalid = true };
@ -111,7 +111,6 @@ struct SubstitutablePathInfo
typedef std::map<StorePath, SubstitutablePathInfo> SubstitutablePathInfos;
struct ValidPathInfo
{
StorePath path;
@ -140,21 +139,11 @@ struct ValidPathInfo
that a particular output path was produced by a derivation; the
path then implies the contents.)
Ideally, the content-addressability assertion would just be a
Boolean, and the store path would be computed from
the name component, narHash and references. However,
1) we've accumulated several types of content-addressed paths
over the years; and 2) fixed-output derivations support
multiple hash algorithms and serialisation methods (flat file
vs NAR). Thus, ca has one of the following forms:
* text:sha256:<sha256 hash of file contents>: For paths
computed by makeTextPath() / addTextToStore().
* fixed:<r?>:<ht>:<h>: For paths computed by
makeFixedOutputPath() / addToStore().
Ideally, the content-addressability assertion would just be a Boolean,
and the store path would be computed from the name component, narHash
and references. However, we support many types of content addresses.
*/
std::string ca;
std::optional<ContentAddress> ca;
bool operator == (const ValidPathInfo & i) const
{
@ -189,9 +178,10 @@ struct ValidPathInfo
Strings shortRefs() const;
ValidPathInfo(const StorePath & path) : path(path) { }
ValidPathInfo(const ValidPathInfo & other) = default;
ValidPathInfo(StorePath && path) : path(std::move(path)) { }
ValidPathInfo(StorePath && path) : path(std::move(path)) { };
ValidPathInfo(const StorePath & path) : path(path) { };
virtual ~ValidPathInfo() { }
};
@ -428,8 +418,11 @@ public:
virtual StorePathSet queryValidDerivers(const StorePath & path) { return {}; };
/* Query the outputs of the derivation denoted by `path'. */
virtual StorePathSet queryDerivationOutputs(const StorePath & path)
{ unsupported("queryDerivationOutputs"); }
virtual StorePathSet queryDerivationOutputs(const StorePath & path);
/* Query the mapping outputName=>outputPath for the given derivation */
virtual OutputPathMap queryDerivationOutputMap(const StorePath & path)
{ unsupported("queryDerivationOutputMap"); }
/* Query the full store path given the hash part of a valid store
path, or empty if the path doesn't exist. */
@ -838,12 +831,6 @@ std::optional<ValidPathInfo> decodeValidPathInfo(
std::istream & str,
bool hashGiven = false);
/* Compute the content-addressability assertion (ValidPathInfo::ca)
for paths created by makeFixedOutputPath() / addToStore(). */
std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);
/* Split URI into protocol+hierarchy part and its parameter set. */
std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);

View file

@ -6,7 +6,7 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f
#define PROTOCOL_VERSION 0x115
#define PROTOCOL_VERSION 0x116
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
@ -30,7 +30,7 @@ typedef enum {
wopSetOptions = 19,
wopCollectGarbage = 20,
wopQuerySubstitutablePathInfo = 21,
wopQueryDerivationOutputs = 22,
wopQueryDerivationOutputs = 22, // obsolete
wopQueryAllValidPaths = 23,
wopQueryFailedPaths = 24,
wopClearFailedPaths = 25,
@ -49,6 +49,7 @@ typedef enum {
wopNarFromPath = 38,
wopAddToStoreNar = 39,
wopQueryMissing = 40,
wopQueryDerivationOutputMap = 41,
} WorkerOp;
@ -69,5 +70,6 @@ template<class T> T readStorePaths(const Store & store, Source & from);
void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths);
void writeOutputPathMap(const Store & store, Sink & out, const OutputPathMap & paths);
}
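For context, the daemon protocol version packs major and minor into one word, so the bump from 0x115 to 0x116 raises the minor to 0x16 (decimal 22); that is exactly the value RemoteStore::queryDerivationOutputs checks before routing through the new wopQueryDerivationOutputMap opcode, falling back to the now-obsolete wopQueryDerivationOutputs for older daemons. A standalone arithmetic check:

#include <cassert>
#include <cstdint>

#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)

int main() {
    const std::uint16_t daemonVersion = 0x116;             // PROTOCOL_VERSION after this change
    assert(GET_PROTOCOL_MAJOR(daemonVersion) == 0x100);
    assert(GET_PROTOCOL_MINOR(daemonVersion) == 0x16);      // decimal 22
}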

View file

@ -262,7 +262,7 @@ static void parse(ParseSink & sink, Source & source, const Path & path)
names[name] = 0;
}
} else if (s == "node") {
if (s.empty()) throw badArchive("entry name missing");
if (name.empty()) throw badArchive("entry name missing");
parse(sink, source, path + "/" + name);
} else
throw badArchive("unknown field " + s);

View file

@ -162,8 +162,18 @@ Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht)
.labels = {"hash-algo"},
.handler = {[ht](std::string s) {
*ht = parseHashType(s);
if (*ht == htUnknown)
throw UsageError("unknown hash type '%1%'", s);
}}
};
}
Args::Flag Args::Flag::mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht)
{
return Flag {
.longName = std::move(longName),
.description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). Optional as can also be gotten from SRI hash itself.",
.labels = {"hash-algo"},
.handler = {[oht](std::string s) {
*oht = std::optional<HashType> { parseHashType(s) };
}}
};
}

View file

@ -85,6 +85,7 @@ protected:
Handler handler;
static Flag mkHashTypeFlag(std::string && longName, HashType * ht);
static Flag mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht);
};
std::map<std::string, Flag::ptr> longFlags;

View file

@ -7,14 +7,11 @@
namespace nix {
const std::string nativeSystem = SYSTEM;
// addPrefix is used for show-trace. Strings added with addPrefix
// will print ahead of the error itself.
BaseError & BaseError::addPrefix(const FormatOrString & fs)
BaseError & BaseError::addTrace(std::optional<ErrPos> e, hintformat hint)
{
prefix_ = fs.s + prefix_;
err.traces.push_front(Trace { .pos = e, .hint = hint});
return *this;
}
@ -28,7 +25,7 @@ const string& BaseError::calcWhat() const
err.name = sname();
std::ostringstream oss;
oss << err;
showErrorInfo(oss, err, false);
what_ = oss.str();
return *what_;
@ -56,28 +53,114 @@ string showErrPos(const ErrPos &errPos)
}
}
// if nixCode contains lines of code, print them to the ostream, indicating the error column.
void printCodeLines(std::ostream &out, const string &prefix, const NixCode &nixCode)
std::optional<LinesOfCode> getCodeLines(const ErrPos &errPos)
{
if (errPos.line <= 0)
return std::nullopt;
if (errPos.origin == foFile) {
LinesOfCode loc;
try {
AutoCloseFD fd = open(errPos.file.c_str(), O_RDONLY | O_CLOEXEC);
if (!fd) {
logError(SysError("opening file '%1%'", errPos.file).info());
return std::nullopt;
}
else
{
// count the newlines.
int count = 0;
string line;
int pl = errPos.line - 1;
do
{
line = readLine(fd.get());
++count;
if (count < pl)
{
;
}
else if (count == pl) {
loc.prevLineOfCode = line;
} else if (count == pl + 1) {
loc.errLineOfCode = line;
} else if (count == pl + 2) {
loc.nextLineOfCode = line;
break;
}
} while (true);
return loc;
}
}
catch (EndOfFile &eof) {
if (loc.errLineOfCode.has_value())
return loc;
else
return std::nullopt;
}
catch (std::exception &e) {
printError("error reading nix file: %s\n%s", errPos.file, e.what());
return std::nullopt;
}
} else {
std::istringstream iss(errPos.file);
// count the newlines.
int count = 0;
string line;
int pl = errPos.line - 1;
LinesOfCode loc;
do
{
std::getline(iss, line);
++count;
if (count < pl)
{
;
}
else if (count == pl) {
loc.prevLineOfCode = line;
} else if (count == pl + 1) {
loc.errLineOfCode = line;
} else if (count == pl + 2) {
loc.nextLineOfCode = line;
break;
}
if (!iss.good())
break;
} while (true);
return loc;
}
}
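For context, both branches above compute the same one-line window around errPos.line: lines N-1, N, and N+1 (1-based), stopping as soon as the line after the error has been captured. A standalone sketch of the in-memory (foString) branch, simplified to a single loop:

#include <iostream>
#include <optional>
#include <sstream>
#include <string>

struct LinesOfCode {
    std::optional<std::string> prevLineOfCode, errLineOfCode, nextLineOfCode;
};

LinesOfCode getCodeLinesFromString(const std::string & source, int errLine) {
    std::istringstream iss(source);
    LinesOfCode loc;
    std::string line;
    for (int count = 1; std::getline(iss, line); ++count) {
        if (count == errLine - 1) loc.prevLineOfCode = line;
        else if (count == errLine) loc.errLineOfCode = line;
        else if (count == errLine + 1) { loc.nextLineOfCode = line; break; }
    }
    return loc;
}

int main() {
    auto loc = getCodeLinesFromString(
        "previous line of code\nthis is the problem line of code\nnext line of code\n", 2);
    std::cout << *loc.errLineOfCode << "\n";   // this is the problem line of code
}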
// print lines of code to the ostream, indicating the error column.
void printCodeLines(std::ostream &out,
const string &prefix,
const ErrPos &errPos,
const LinesOfCode &loc)
{
// previous line of code.
if (nixCode.prevLineOfCode.has_value()) {
if (loc.prevLineOfCode.has_value()) {
out << std::endl
<< fmt("%1% %|2$5d|| %3%",
prefix,
(nixCode.errPos.line - 1),
*nixCode.prevLineOfCode);
prefix,
(errPos.line - 1),
*loc.prevLineOfCode);
}
if (nixCode.errLineOfCode.has_value()) {
if (loc.errLineOfCode.has_value()) {
// line of code containing the error.
out << std::endl
<< fmt("%1% %|2$5d|| %3%",
prefix,
(nixCode.errPos.line),
*nixCode.errLineOfCode);
prefix,
(errPos.line),
*loc.errLineOfCode);
// error arrows for the column range.
if (nixCode.errPos.column > 0) {
int start = nixCode.errPos.column;
if (errPos.column > 0) {
int start = errPos.column;
std::string spaces;
for (int i = 0; i < start; ++i) {
spaces.append(" ");
@ -87,23 +170,49 @@ void printCodeLines(std::ostream &out, const string &prefix, const NixCode &nixC
out << std::endl
<< fmt("%1% |%2%" ANSI_RED "%3%" ANSI_NORMAL,
prefix,
spaces,
arrows);
prefix,
spaces,
arrows);
}
}
// next line of code.
if (nixCode.nextLineOfCode.has_value()) {
if (loc.nextLineOfCode.has_value()) {
out << std::endl
<< fmt("%1% %|2$5d|| %3%",
prefix,
(nixCode.errPos.line + 1),
*nixCode.nextLineOfCode);
prefix,
(errPos.line + 1),
*loc.nextLineOfCode);
}
}
std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
void printAtPos(const string &prefix, const ErrPos &pos, std::ostream &out)
{
if (pos)
{
switch (pos.origin) {
case foFile: {
out << prefix << ANSI_BLUE << "at: " << ANSI_YELLOW << showErrPos(pos) <<
ANSI_BLUE << " in file: " << ANSI_NORMAL << pos.file;
break;
}
case foString: {
out << prefix << ANSI_BLUE << "at: " << ANSI_YELLOW << showErrPos(pos) <<
ANSI_BLUE << " from string" << ANSI_NORMAL;
break;
}
case foStdin: {
out << prefix << ANSI_BLUE << "at: " << ANSI_YELLOW << showErrPos(pos) <<
ANSI_BLUE << " from stdin" << ANSI_NORMAL;
break;
}
default:
throw Error("invalid FileOrigin in errPos");
}
}
}
std::ostream& showErrorInfo(std::ostream &out, const ErrorInfo &einfo, bool showTrace)
{
auto errwidth = std::max<size_t>(getWindowSize().second, 20);
string prefix = "";
@ -158,8 +267,12 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
}
}
auto ndl = prefix.length() + levelString.length() + 3 + einfo.name.length() + einfo.programName.value_or("").length();
auto dashwidth = ndl > (errwidth - 3) ? 3 : errwidth - ndl;
auto ndl = prefix.length()
+ filterANSIEscapes(levelString, true).length()
+ 7
+ einfo.name.length()
+ einfo.programName.value_or("").length();
auto dashwidth = std::max<int>(errwidth - ndl, 3);
std::string dashes(dashwidth, '-');
@ -179,16 +292,9 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
einfo.programName.value_or(""));
bool nl = false; // intersperse newline between sections.
if (einfo.nixCode.has_value()) {
if (einfo.nixCode->errPos.file != "") {
// filename, line, column.
out << std::endl << fmt("%1%in file: " ANSI_BLUE "%2% %3%" ANSI_NORMAL,
prefix,
einfo.nixCode->errPos.file,
showErrPos(einfo.nixCode->errPos));
} else {
out << std::endl << fmt("%1%from command line argument", prefix);
}
if (einfo.errPos.has_value() && (*einfo.errPos)) {
out << prefix << std::endl;
printAtPos(prefix, *einfo.errPos, out);
nl = true;
}
@ -200,12 +306,16 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
nl = true;
}
// lines of code.
if (einfo.nixCode.has_value() && einfo.nixCode->errLineOfCode.has_value()) {
if (nl)
out << std::endl << prefix;
printCodeLines(out, prefix, *einfo.nixCode);
nl = true;
if (einfo.errPos.has_value() && (*einfo.errPos)) {
auto loc = getCodeLines(*einfo.errPos);
// lines of code.
if (loc.has_value()) {
if (nl)
out << std::endl << prefix;
printCodeLines(out, prefix, *einfo.errPos, *loc);
nl = true;
}
}
// hint
@ -216,6 +326,59 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
nl = true;
}
// traces
if (showTrace && !einfo.traces.empty())
{
const string tracetitle(" show-trace ");
int fill = errwidth - tracetitle.length();
int lw = 0;
int rw = 0;
const int min_dashes = 3;
if (fill > min_dashes * 2) {
if (fill % 2 != 0) {
lw = fill / 2;
rw = lw + 1;
}
else
{
lw = rw = fill / 2;
}
}
else
lw = rw = min_dashes;
if (nl)
out << std::endl << prefix;
out << ANSI_BLUE << std::string(lw, '-') << tracetitle << std::string(rw, '-') << ANSI_NORMAL;
for (auto iter = einfo.traces.rbegin(); iter != einfo.traces.rend(); ++iter)
{
try {
out << std::endl << prefix;
out << ANSI_BLUE << "trace: " << ANSI_NORMAL << iter->hint.str();
nl = true;
if (*iter->pos) {
auto pos = iter->pos.value();
out << std::endl << prefix;
printAtPos(prefix, pos, out);
auto loc = getCodeLines(pos);
if (loc.has_value())
{
out << std::endl << prefix;
printCodeLines(out, prefix, pos, *loc);
out << std::endl << prefix;
}
}
} catch(const std::bad_optional_access& e) {
out << iter->hint.str() << std::endl;
}
}
}
return out;
}
}

View file

@ -25,20 +25,20 @@ namespace nix {
/*
This file defines two main structs/classes used in nix error handling.
This file defines two main structs/classes used in nix error handling.
ErrorInfo provides a standard payload of error information, with conversion to string
happening in the logger rather than at the call site.
ErrorInfo provides a standard payload of error information, with conversion to string
happening in the logger rather than at the call site.
BaseError is the ancestor of nix specific exceptions (and Interrupted), and contains
an ErrorInfo.
BaseError is the ancestor of nix specific exceptions (and Interrupted), and contains
an ErrorInfo.
ErrorInfo structs are sent to the logger as part of an exception, or directly with the
logError or logWarning macros.
ErrorInfo structs are sent to the logger as part of an exception, or directly with the
logError or logWarning macros.
See the error-demo.cc program for usage examples.
See the error-demo.cc program for usage examples.
*/
*/
typedef enum {
lvlError = 0,
@ -50,11 +50,25 @@ typedef enum {
lvlVomit
} Verbosity;
typedef enum {
foFile,
foStdin,
foString
} FileOrigin;
// the lines of code surrounding an error.
struct LinesOfCode {
std::optional<string> prevLineOfCode;
std::optional<string> errLineOfCode;
std::optional<string> nextLineOfCode;
};
// ErrPos indicates the location of an error in a nix file.
struct ErrPos {
int line = 0;
int column = 0;
string file;
FileOrigin origin;
operator bool() const
{
@ -65,9 +79,14 @@ struct ErrPos {
template <class P>
ErrPos& operator=(const P &pos)
{
origin = pos.origin;
line = pos.line;
column = pos.column;
file = pos.file;
// is file symbol null?
if (pos.file.set())
file = pos.file;
else
file = "";
return *this;
}
@ -78,11 +97,9 @@ struct ErrPos {
}
};
struct NixCode {
ErrPos errPos;
std::optional<string> prevLineOfCode;
std::optional<string> errLineOfCode;
std::optional<string> nextLineOfCode;
struct Trace {
std::optional<ErrPos> pos;
hintformat hint;
};
struct ErrorInfo {
@ -90,19 +107,19 @@ struct ErrorInfo {
string name;
string description;
std::optional<hintformat> hint;
std::optional<NixCode> nixCode;
std::optional<ErrPos> errPos;
std::list<Trace> traces;
static std::optional<string> programName;
};
std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo);
std::ostream& showErrorInfo(std::ostream &out, const ErrorInfo &einfo, bool showTrace);
/* BaseError should generally not be caught, as it has Interrupted as
a subclass. Catch Error instead. */
class BaseError : public std::exception
{
protected:
string prefix_; // used for location traces etc.
mutable ErrorInfo err;
mutable std::optional<string> what_;
@ -113,23 +130,23 @@ public:
template<typename... Args>
BaseError(unsigned int status, const Args & ... args)
: err { .level = lvlError,
.hint = hintfmt(args...)
}
: err {.level = lvlError,
.hint = hintfmt(args...)
}
, status(status)
{ }
template<typename... Args>
BaseError(const std::string & fs, const Args & ... args)
: err { .level = lvlError,
.hint = hintfmt(fs, args...)
}
: err {.level = lvlError,
.hint = hintfmt(fs, args...)
}
{ }
BaseError(hintformat hint)
: err { .level = lvlError,
.hint = hint
}
: err {.level = lvlError,
.hint = hint
}
{ }
BaseError(ErrorInfo && e)
@ -150,10 +167,17 @@ public:
#endif
const string & msg() const { return calcWhat(); }
const string & prefix() const { return prefix_; }
BaseError & addPrefix(const FormatOrString & fs);
const ErrorInfo & info() { calcWhat(); return err; }
template<typename... Args>
BaseError & addTrace(std::optional<ErrPos> e, const string &fs, const Args & ... args)
{
return addTrace(e, hintfmt(fs, args...));
}
BaseError & addTrace(std::optional<ErrPos> e, hintformat hint);
bool hasTrace() const { return !err.traces.empty(); }
};
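For context, addTrace pushes onto the front of err.traces and showErrorInfo walks the list back to front, so traces print in the order the call sites added them. A standalone illustration of that ordering, matching the addTrace unit tests added later in this commit:

#include <iostream>
#include <list>
#include <string>

int main() {
    std::list<std::string> traces;
    traces.push_front("while trying to compute 42");           // added first
    traces.push_front("while doing something without a pos");  // added second
    for (auto it = traces.rbegin(); it != traces.rend(); ++it)
        std::cout << "trace: " << *it << "\n";   // first-added trace prints first
}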
#define MakeError(newClass, superClass) \

View file

@ -1,6 +1,7 @@
#pragma once
#include <boost/format.hpp>
#include <boost/algorithm/string/replace.hpp>
#include <string>
#include "ansicolor.hh"
@ -103,7 +104,9 @@ class hintformat
public:
hintformat(const string &format) :fmt(format)
{
fmt.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit);
fmt.exceptions(boost::io::all_error_bits ^
boost::io::too_many_args_bit ^
boost::io::too_few_args_bit);
}
hintformat(const hintformat &hf)
@ -117,6 +120,13 @@ public:
return *this;
}
template<class T>
hintformat& operator%(const normaltxt<T> &value)
{
fmt % value.value;
return *this;
}
std::string str() const
{
return fmt.str();
@ -136,4 +146,9 @@ inline hintformat hintfmt(const std::string & fs, const Args & ... args)
return f;
}
inline hintformat hintfmt(std::string plain_string)
{
// we won't be receiving any args in this case, so just print the original string
return hintfmt("%s", normaltxt(plain_string));
}
}
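For context, masking too_few_args_bit means a hint whose placeholders are not all fulfilled renders with the missing ones empty instead of throwing, and the new plain-string hintfmt overload routes literal text through "%s" so stray percent signs are harmless. A standalone sketch of the first point using boost::format directly; the expected behaviour mirrors the tooFewArguments unit test added later in this commit:

#include <boost/format.hpp>
#include <iostream>

int main() {
    boost::format fmt("only one arg %1% %2%");
    fmt.exceptions(boost::io::all_error_bits ^
                   boost::io::too_many_args_bit ^
                   boost::io::too_few_args_bit);
    fmt % "fulfilled";
    // prints "only one arg fulfilled " -- the unfed %2% renders empty, no exception
    std::cout << fmt.str() << "\n";
}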

View file

@ -4,6 +4,7 @@
#include <openssl/md5.h>
#include <openssl/sha.h>
#include "args.hh"
#include "hash.hh"
#include "archive.hh"
#include "util.hh"
@ -18,11 +19,13 @@ namespace nix {
void Hash::init()
{
if (type == htMD5) hashSize = md5HashSize;
else if (type == htSHA1) hashSize = sha1HashSize;
else if (type == htSHA256) hashSize = sha256HashSize;
else if (type == htSHA512) hashSize = sha512HashSize;
else abort();
assert(type);
switch (*type) {
case htMD5: hashSize = md5HashSize; break;
case htSHA1: hashSize = sha1HashSize; break;
case htSHA256: hashSize = sha256HashSize; break;
case htSHA512: hashSize = sha512HashSize; break;
}
assert(hashSize <= maxHashSize);
memset(hash, 0, maxHashSize);
}
@ -98,15 +101,22 @@ static string printHash32(const Hash & hash)
string printHash16or32(const Hash & hash)
{
assert(hash.type);
return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false);
}
HashType assertInitHashType(const Hash & h)
{
assert(h.type);
return *h.type;
}
std::string Hash::to_string(Base base, bool includeType) const
{
std::string s;
if (base == SRI || includeType) {
s += printHashType(type);
s += printHashType(assertInitHashType(*this));
s += base == SRI ? '-' : ':';
}
switch (base) {
@ -124,8 +134,10 @@ std::string Hash::to_string(Base base, bool includeType) const
return s;
}
Hash::Hash(std::string_view s, HashType type) : Hash(s, std::optional { type }) { }
Hash::Hash(std::string_view s) : Hash(s, std::optional<HashType>{}) { }
Hash::Hash(std::string_view s, HashType type)
Hash::Hash(std::string_view s, std::optional<HashType> type)
: type(type)
{
size_t pos = 0;
@ -136,17 +148,17 @@ Hash::Hash(std::string_view s, HashType type)
sep = s.find('-');
if (sep != string::npos) {
isSRI = true;
} else if (type == htUnknown)
} else if (! type)
throw BadHash("hash '%s' does not include a type", s);
}
if (sep != string::npos) {
string hts = string(s, 0, sep);
this->type = parseHashType(hts);
if (this->type == htUnknown)
if (!this->type)
throw BadHash("unknown hash type '%s'", hts);
if (type != htUnknown && type != this->type)
throw BadHash("hash '%s' should have type '%s'", s, printHashType(type));
if (type && type != this->type)
throw BadHash("hash '%s' should have type '%s'", s, printHashType(*type));
pos = sep + 1;
}
@ -202,13 +214,15 @@ Hash::Hash(std::string_view s, HashType type)
}
else
throw BadHash("hash '%s' has wrong length for hash type '%s'", s, printHashType(type));
throw BadHash("hash '%s' has wrong length for hash type '%s'", s, printHashType(*type));
}
Hash newHashAllowEmpty(std::string hashStr, HashType ht)
Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht)
{
if (hashStr.empty()) {
Hash h(ht);
if (!ht)
throw BadHash("empty hash requires explicit hash type");
Hash h(*ht);
warn("found empty hash, assuming '%s'", h.to_string(SRI, true));
return h;
} else
@ -328,24 +342,36 @@ Hash compressHash(const Hash & hash, unsigned int newSize)
}
HashType parseHashType(const string & s)
std::optional<HashType> parseHashTypeOpt(const string & s)
{
if (s == "md5") return htMD5;
else if (s == "sha1") return htSHA1;
else if (s == "sha256") return htSHA256;
else if (s == "sha512") return htSHA512;
else return htUnknown;
else return std::optional<HashType> {};
}
HashType parseHashType(const string & s)
{
auto opt_h = parseHashTypeOpt(s);
if (opt_h)
return *opt_h;
else
throw UsageError("unknown hash algorithm '%1%'", s);
}
string printHashType(HashType ht)
{
if (ht == htMD5) return "md5";
else if (ht == htSHA1) return "sha1";
else if (ht == htSHA256) return "sha256";
else if (ht == htSHA512) return "sha512";
else abort();
switch (ht) {
case htMD5: return "md5";
case htSHA1: return "sha1";
case htSHA256: return "sha256";
case htSHA512: return "sha512";
default:
// illegal hash type enum value internally, as opposed to external input
// which should be validated with a nice error message.

assert(false);
}
}
}

View file

@ -10,7 +10,7 @@ namespace nix {
MakeError(BadHash, Error);
enum HashType : char { htUnknown, htMD5, htSHA1, htSHA256, htSHA512 };
enum HashType : char { htMD5 = 42, htSHA1, htSHA256, htSHA512 };
const int md5HashSize = 16;
@ -29,7 +29,7 @@ struct Hash
unsigned int hashSize = 0;
unsigned char hash[maxHashSize] = {};
HashType type = htUnknown;
std::optional<HashType> type = {};
/* Create an unset hash object. */
Hash() { };
@ -40,14 +40,18 @@ struct Hash
/* Initialize the hash from a string representation, in the format
"[<type>:]<base16|base32|base64>" or "<type>-<base64>" (a
Subresource Integrity hash expression). If the 'type' argument
is htUnknown, then the hash type must be specified in the
is not present, then the hash type must be specified in the
string. */
Hash(std::string_view s, HashType type = htUnknown);
Hash(std::string_view s, std::optional<HashType> type);
// type must be provided
Hash(std::string_view s, HashType type);
// hash type must be part of string
Hash(std::string_view s);
void init();
/* Check whether a hash is set. */
operator bool () const { return type != htUnknown; }
operator bool () const { return (bool) type; }
/* Check whether two hash are equal. */
bool operator == (const Hash & h2) const;
@ -95,7 +99,7 @@ struct Hash
};
/* Helper that defaults empty hashes to the 0 hash. */
Hash newHashAllowEmpty(std::string hashStr, HashType ht);
Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht);
/* Print a hash in base-16 if it's MD5, or base-32 otherwise. */
string printHash16or32(const Hash & hash);
@ -118,6 +122,8 @@ Hash compressHash(const Hash & hash, unsigned int newSize);
/* Parse a string representing a hash type. */
HashType parseHashType(const string & s);
/* Will return nothing on parse error */
std::optional<HashType> parseHashTypeOpt(const string & s);
/* And the reverse. */
string printHashType(HashType ht);
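For context, dropping htUnknown means "no hash type" is now spelled std::nullopt rather than a sentinel enumerator, so every consumer must establish presence before use. A standalone sketch mirroring parseHashTypeOpt from hash.cc above:

#include <cassert>
#include <optional>
#include <string>

enum HashType : char { htMD5 = 42, htSHA1, htSHA256, htSHA512 };

std::optional<HashType> parseHashTypeOpt(const std::string & s) {
    if (s == "md5") return htMD5;
    if (s == "sha1") return htSHA1;
    if (s == "sha256") return htSHA256;
    if (s == "sha512") return htSHA512;
    return std::nullopt;   // previously the htUnknown sentinel
}

int main() {
    assert(*parseHashTypeOpt("sha256") == htSHA256);
    assert(!parseHashTypeOpt("not-a-hash"));   // callers can no longer forget the sentinel check
}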

View file

@ -1,5 +1,6 @@
#include "logging.hh"
#include "util.hh"
#include "config.hh"
#include <atomic>
#include <nlohmann/json.hpp>
@ -7,6 +8,10 @@
namespace nix {
LoggerSettings loggerSettings;
static GlobalConfig::Register r1(&loggerSettings);
static thread_local ActivityId curActivity = 0;
ActivityId getCurActivity()
@ -72,10 +77,11 @@ public:
void logEI(const ErrorInfo & ei) override
{
std::stringstream oss;
oss << ei;
showErrorInfo(oss, ei, loggerSettings.showTrace.get());
log(ei.level, oss.str());
}
void startActivity(ActivityId act, Verbosity lvl, ActivityType type,
const std::string & s, const Fields & fields, ActivityId parent)
@ -173,7 +179,7 @@ struct JSONLogger : Logger {
void logEI(const ErrorInfo & ei) override
{
std::ostringstream oss;
oss << ei;
showErrorInfo(oss, ei, loggerSettings.showTrace.get());
nlohmann::json json;
json["action"] = "msg";

View file

@ -2,6 +2,7 @@
#include "types.hh"
#include "error.hh"
#include "config.hh"
namespace nix {
@ -34,6 +35,16 @@ typedef enum {
typedef uint64_t ActivityId;
struct LoggerSettings : Config
{
Setting<bool> showTrace{this,
false,
"show-trace",
"Whether to show a stack trace on evaluation errors."};
};
extern LoggerSettings loggerSettings;
class Logger
{
friend struct Activity;

View file

@ -0,0 +1,78 @@
#include "compression.hh"
#include <gtest/gtest.h>
namespace nix {
/* ----------------------------------------------------------------------------
* compress / decompress
* --------------------------------------------------------------------------*/
TEST(compress, compressWithUnknownMethod) {
ASSERT_THROW(compress("invalid-method", "something-to-compress"), UnknownCompressionMethod);
}
TEST(compress, noneMethodDoesNothingToTheInput) {
ref<std::string> o = compress("none", "this-is-a-test");
ASSERT_EQ(*o, "this-is-a-test");
}
TEST(decompress, decompressXzCompressed) {
auto method = "xz";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
ref<std::string> o = decompress(method, *compress(method, str));
ASSERT_EQ(*o, str);
}
TEST(decompress, decompressBzip2Compressed) {
auto method = "bzip2";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
ref<std::string> o = decompress(method, *compress(method, str));
ASSERT_EQ(*o, str);
}
TEST(decompress, decompressBrCompressed) {
auto method = "br";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
ref<std::string> o = decompress(method, *compress(method, str));
ASSERT_EQ(*o, str);
}
TEST(decompress, decompressInvalidInputThrowsCompressionError) {
auto method = "bzip2";
auto str = "this is a string that does not qualify as valid bzip2 data";
ASSERT_THROW(decompress(method, str), CompressionError);
}
/* ----------------------------------------------------------------------------
* compression sinks
* --------------------------------------------------------------------------*/
TEST(makeCompressionSink, noneSinkDoesNothingToInput) {
StringSink strSink;
auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
auto sink = makeCompressionSink("none", strSink);
(*sink)(inputString);
sink->finish();
ASSERT_STREQ((*strSink.s).c_str(), inputString);
}
TEST(makeCompressionSink, compressAndDecompress) {
StringSink strSink;
auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
auto decompressionSink = makeDecompressionSink("bzip2", strSink);
auto sink = makeCompressionSink("bzip2", *decompressionSink);
(*sink)(inputString);
sink->finish();
decompressionSink->finish();
ASSERT_STREQ((*strSink.s).c_str(), inputString);
}
}

View file

@ -72,9 +72,4 @@ namespace nix {
"7299aeadb6889018501d289e4900f7e4331b99dec4b5433a"
"c7d329eeb6dd26545e96e55b874be909");
}
TEST(hashString, hashingWithUnknownAlgoExits) {
auto s = "unknown";
ASSERT_DEATH(hashString(HashType::htUnknown, s), "");
}
}

View file

@ -1,6 +1,7 @@
#include "logging.hh"
#include "nixexpr.hh"
#include "util.hh"
#include <fstream>
#include <gtest/gtest.h>
@ -10,6 +11,13 @@ namespace nix {
* logEI
* --------------------------------------------------------------------------*/
const char *test_file =
"previous line of code\n"
"this is the problem line of code\n"
"next line of code\n";
const char *one_liner =
"this is the other problem line of code";
TEST(logEI, catpuresBasicProperties) {
MakeError(TestError, Error);
@ -42,7 +50,7 @@ namespace nix {
logger->logEI(ei);
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0minitial error\x1B[0m; subsequent error message.\n");
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\ninitial error; subsequent error message.\n");
}
}
@ -60,8 +68,7 @@ namespace nix {
logError(e.info());
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0mstatting file\x1B[0m: \x1B[33;1mBad file descriptor\x1B[0m\n");
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nstatting file: \x1B[33;1mBad file descriptor\x1B[0m\n");
}
}
@ -69,9 +76,9 @@ namespace nix {
testing::internal::CaptureStderr();
logger->logEI({ .level = lvlInfo,
.name = "Info name",
.description = "Info description",
});
.name = "Info name",
.description = "Info description",
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1minfo:\x1B[0m\x1B[34;1m --- Info name --- error-unit-test\x1B[0m\nInfo description\n");
@ -85,7 +92,7 @@ namespace nix {
logger->logEI({ .level = lvlTalkative,
.name = "Talkative name",
.description = "Talkative description",
});
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1mtalk:\x1B[0m\x1B[34;1m --- Talkative name --- error-unit-test\x1B[0m\nTalkative description\n");
@ -99,7 +106,7 @@ namespace nix {
logger->logEI({ .level = lvlChatty,
.name = "Chatty name",
.description = "Talkative description",
});
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1mchat:\x1B[0m\x1B[34;1m --- Chatty name --- error-unit-test\x1B[0m\nTalkative description\n");
@ -113,7 +120,7 @@ namespace nix {
logger->logEI({ .level = lvlDebug,
.name = "Debug name",
.description = "Debug description",
});
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mdebug:\x1B[0m\x1B[34;1m --- Debug name --- error-unit-test\x1B[0m\nDebug description\n");
@ -127,7 +134,7 @@ namespace nix {
logger->logEI({ .level = lvlVomit,
.name = "Vomit name",
.description = "Vomit description",
});
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1mvomit:\x1B[0m\x1B[34;1m --- Vomit name --- error-unit-test\x1B[0m\nVomit description\n");
@ -137,14 +144,13 @@ namespace nix {
* logError
* --------------------------------------------------------------------------*/
TEST(logError, logErrorWithoutHintOrCode) {
testing::internal::CaptureStderr();
logError({
.name = "name",
.description = "error description",
});
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n");
@ -152,7 +158,7 @@ namespace nix {
TEST(logError, logErrorWithPreviousAndNextLinesOfCode) {
SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix");
auto problem_file = testTable.create(test_file);
testing::internal::CaptureStderr();
@ -160,53 +166,43 @@ namespace nix {
.name = "error name",
.description = "error with code lines",
.hint = hintfmt("this hint has %1% templated %2%!!",
"yellow",
"values"),
.nixCode = NixCode {
.errPos = Pos(problem_file, 40, 13),
.prevLineOfCode = "previous line of code",
.errLineOfCode = "this is the problem line of code",
.nextLineOfCode = "next line of code",
}});
"yellow",
"values"),
.errPos = Pos(foString, problem_file, 02, 13),
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nerror with code lines\n\n 39| previous line of code\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 41| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\nerror with code lines\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
}
TEST(logError, logErrorWithoutLinesOfCode) {
TEST(logError, logErrorWithInvalidFile) {
SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix");
auto problem_file = testTable.create("invalid filename");
testing::internal::CaptureStderr();
logError({
.name = "error name",
.description = "error without any code lines.",
.hint = hintfmt("this hint has %1% templated %2%!!",
"yellow",
"values"),
.nixCode = NixCode {
.errPos = Pos(problem_file, 40, 13)
}});
"yellow",
"values"),
.errPos = Pos(foFile, problem_file, 02, 13)
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nerror without any code lines.\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nopening file '\x1B[33;1minvalid filename\x1B[0m': \x1B[33;1mNo such file or directory\x1B[0m\n\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m in file: \x1B[0minvalid filename\n\nerror without any code lines.\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
}
TEST(logError, logErrorWithOnlyHintAndName) {
SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix");
testing::internal::CaptureStderr();
logError({
.name = "error name",
.hint = hintfmt("hint %1%", "only"),
.nixCode = NixCode {
.errPos = Pos(problem_file, 40, 13)
}});
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nhint \x1B[33;1monly\x1B[0m\n");
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nhint \x1B[33;1monly\x1B[0m\n");
}
@ -218,19 +214,19 @@ namespace nix {
testing::internal::CaptureStderr();
logWarning({
.name = "name",
.description = "error description",
.hint = hintfmt("there was a %1%", "warning"),
});
.name = "name",
.description = "warning description",
.hint = hintfmt("there was a %1%", "warning"),
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n");
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nwarning description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n");
}
TEST(logWarning, logWarningWithFileLineNumAndCode) {
SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix");
auto problem_file = testTable.create(test_file);
testing::internal::CaptureStderr();
@ -238,18 +234,125 @@ namespace nix {
.name = "warning name",
.description = "warning description",
.hint = hintfmt("this hint has %1% templated %2%!!",
"yellow",
"values"),
.nixCode = NixCode {
.errPos = Pos(problem_file, 40, 13),
.prevLineOfCode = std::nullopt,
.errLineOfCode = "this is the problem line of code",
.nextLineOfCode = std::nullopt
}});
"yellow",
"values"),
.errPos = Pos(foStdin, problem_file, 2, 13),
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nwarning description\n\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from stdin\x1B[0m\n\nwarning description\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
}
/* ----------------------------------------------------------------------------
* traces
* --------------------------------------------------------------------------*/
TEST(addTrace, showTracesWithShowTrace) {
SymbolTable testTable;
auto problem_file = testTable.create(test_file);
auto oneliner_file = testTable.create(one_liner);
auto e = AssertionError(ErrorInfo {
.name = "wat",
.description = "a well-known problem occurred",
.hint = hintfmt("it has been %1% days since our last error", "zero"),
.errPos = Pos(foString, problem_file, 2, 13),
});
e.addTrace(Pos(foStdin, oneliner_file, 1, 19), "while trying to compute %1%", 42);
e.addTrace(std::nullopt, "while doing something without a %1%", "pos");
testing::internal::CaptureStderr();
loggerSettings.showTrace.assign(true);
logError(e.info());
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\na well-known problem occurred\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n\x1B[34;1m---- show-trace ----\x1B[0m\n\x1B[34;1mtrace: \x1B[0mwhile trying to compute \x1B[33;1m42\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(1:19)\x1B[34;1m from stdin\x1B[0m\n\n 1| this is the other problem line of code\n | \x1B[31;1m^\x1B[0m\n\n\x1B[34;1mtrace: \x1B[0mwhile doing something without a \x1B[33;1mpos\x1B[0m\n");
}
TEST(addTrace, hideTracesWithoutShowTrace) {
SymbolTable testTable;
auto problem_file = testTable.create(test_file);
auto oneliner_file = testTable.create(one_liner);
auto e = AssertionError(ErrorInfo {
.name = "wat",
.description = "a well-known problem occurred",
.hint = hintfmt("it has been %1% days since our last error", "zero"),
.errPos = Pos(foString, problem_file, 2, 13),
});
e.addTrace(Pos(foStdin, oneliner_file, 1, 19), "while trying to compute %1%", 42);
e.addTrace(std::nullopt, "while doing something without a %1%", "pos");
testing::internal::CaptureStderr();
loggerSettings.showTrace.assign(false);
logError(e.info());
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\na well-known problem occurred\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n");
}
/* ----------------------------------------------------------------------------
* hintfmt
* --------------------------------------------------------------------------*/
TEST(hintfmt, percentStringWithoutArgs) {
const char *teststr = "this is 100%s correct!";
ASSERT_STREQ(
hintfmt(teststr).str().c_str(),
teststr);
}
TEST(hintfmt, fmtToHintfmt) {
ASSERT_STREQ(
hintfmt(fmt("the color of this this text is %1%", "not yellow")).str().c_str(),
"the color of this this text is not yellow");
}
TEST(hintfmt, tooFewArguments) {
ASSERT_STREQ(
hintfmt("only one arg %1% %2%", "fulfilled").str().c_str(),
"only one arg " ANSI_YELLOW "fulfilled" ANSI_NORMAL " ");
}
TEST(hintfmt, tooManyArguments) {
ASSERT_STREQ(
hintfmt("what about this %1% %2%", "%3%", "one", "two").str().c_str(),
"what about this " ANSI_YELLOW "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL);
}
/* ----------------------------------------------------------------------------
* ErrPos
* --------------------------------------------------------------------------*/
TEST(errpos, invalidPos) {
// contains an invalid symbol, which we should not dereference!
Pos invalid;
// constructing without access violation.
ErrPos ep(invalid);
// assignment without access violation.
ep = invalid;
}
}

View file

@ -593,7 +593,7 @@ static void upgradeDerivations(Globals & globals,
} else newElems.push_back(i);
} catch (Error & e) {
e.addPrefix(fmt("while trying to find an upgrade for '%s':\n", i.queryName()));
e.addTrace(std::nullopt, "while trying to find an upgrade for '%s'", i.queryName());
throw;
}
}
@ -1185,7 +1185,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
} catch (AssertionError & e) {
printMsg(lvlTalkative, "skipping derivation named '%1%' which gives an assertion failure", i.queryName());
} catch (Error & e) {
e.addPrefix(fmt("while querying the derivation named '%1%':\n", i.queryName()));
e.addTrace(std::nullopt, "while querying the derivation named '%1%'", i.queryName());
throw;
}
}

View file

@ -72,8 +72,6 @@ static int _main(int argc, char * * argv)
else if (*arg == "--type") {
string s = getArg(*arg, arg, end);
ht = parseHashType(s);
if (ht == htUnknown)
throw UsageError("unknown hash type '%1%'", s);
}
else if (*arg == "--print-path")
printPath = true;

View file

@ -725,7 +725,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
auto path = store->followLinksToStorePath(i);
printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path));
auto info = store->queryPathInfo(path);
HashSink sink(info->narHash.type);
HashSink sink(*info->narHash.type);
store->narFromPath(path, sink);
auto current = sink.finish();
if (current.first != info->narHash) {
@ -864,7 +864,7 @@ static void opServe(Strings opFlags, Strings opArgs)
out << info->narSize // downloadSize
<< info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
out << (info->narHash ? info->narHash.to_string(Base32, true) : "") << info->ca << info->sigs;
out << (info->narHash ? info->narHash.to_string(Base32, true) : "") << renderContentAddress(info->ca) << info->sigs;
} catch (InvalidPath &) {
}
}
@ -952,7 +952,7 @@ static void opServe(Strings opFlags, Strings opArgs)
info.references = readStorePaths<StorePathSet>(*store, in);
in >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(in);
in >> info.ca;
info.ca = parseContentAddressOpt(readString(in));
if (info.narSize == 0)
throw Error("narInfo is too old and missing the narSize field");

View file

@ -48,7 +48,10 @@ struct CmdAddToStore : MixDryRun, StoreCommand
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, *namePart));
info.narHash = narHash;
info.narSize = sink.s->size();
info.ca = makeFixedOutputCA(FileIngestionMethod::Recursive, info.narHash);
info.ca = std::optional { FixedOutputHash {
.method = FileIngestionMethod::Recursive,
.hash = info.narHash,
} };
if (!dryRun) {
auto source = StringSource { *sink.s };

View file

@ -50,7 +50,7 @@ BuildEnvironment readEnvironment(const Path & path)
R"re((?:\$?'(?:[^'\\]|\\[abeEfnrtv\\'"?])*'))re";
static std::string indexedArrayRegex =
R"re((?:\(( *\[[0-9]+]="(?:[^"\\]|\\.)*")**\)))re";
R"re((?:\(( *\[[0-9]+\]="(?:[^"\\]|\\.)*")*\)))re";
static std::regex varRegex(
"^(" + varNameRegex + ")=(" + simpleStringRegex + "|" + quotedStringRegex + "|" + indexedArrayRegex + ")\n");
@ -135,7 +135,7 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
drv.inputSrcs.insert(std::move(getEnvShPath));
Hash h = hashDerivationModulo(*store, drv, true);
auto shellOutPath = store->makeOutputPath("out", h, drvName);
drv.outputs.insert_or_assign("out", DerivationOutput { shellOutPath, "", "" });
drv.outputs.insert_or_assign("out", DerivationOutput { .path = shellOutPath });
drv.env["out"] = store->printStorePath(shellOutPath);
auto shellDrvPath2 = writeDerivation(store, drv, drvName);

View file

@ -1,5 +1,6 @@
#include "command.hh"
#include "hash.hh"
#include "content-address.hh"
#include "legacy.hh"
#include "shared.hh"
#include "references.hh"
@ -79,12 +80,12 @@ static RegisterCommand r2("hash-path", [](){ return make_ref<CmdHash>(FileIngest
struct CmdToBase : Command
{
Base base;
HashType ht = htUnknown;
std::optional<HashType> ht;
std::vector<std::string> args;
CmdToBase(Base base) : base(base)
{
addFlag(Flag::mkHashTypeFlag("type", &ht));
addFlag(Flag::mkHashTypeOptFlag("type", &ht));
expectArgs("strings", &args);
}
@ -132,8 +133,6 @@ static int compatNixHash(int argc, char * * argv)
else if (*arg == "--type") {
string s = getArg(*arg, arg, end);
ht = parseHashType(s);
if (ht == htUnknown)
throw UsageError("unknown hash type '%1%'", s);
}
else if (*arg == "--to-base16") op = opTo16;
else if (*arg == "--to-base32") op = opTo32;

View file

@ -82,7 +82,10 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
if (hasSelfReference) info.references.insert(info.path);
info.narHash = narHash;
info.narSize = sink.s->size();
info.ca = makeFixedOutputCA(FileIngestionMethod::Recursive, info.narHash);
info.ca = FixedOutputHash {
.method = FileIngestionMethod::Recursive,
.hash = info.narHash,
};
if (!json)
printInfo("rewrote '%s' to '%s'", pathS, store->printStorePath(info.path));

View file

@ -115,7 +115,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
std::cout << '\t';
Strings ss;
if (info->ultimate) ss.push_back("ultimate");
if (info->ca != "") ss.push_back("ca:" + info->ca);
if (info->ca) ss.push_back("ca:" + renderContentAddress(*info->ca));
for (auto & sig : info->sigs) ss.push_back(sig);
std::cout << concatStringsSep(" ", ss);
}

View file

@ -211,12 +211,12 @@ void NixRepl::mainLoop(const std::vector<std::string> & files)
// input without clearing the input so far.
continue;
} else {
printMsg(lvlError, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg());
printMsg(lvlError, e.msg());
}
} catch (Error & e) {
printMsg(lvlError, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg());
printMsg(lvlError, e.msg());
} catch (Interrupted & e) {
printMsg(lvlError, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg());
printMsg(lvlError, e.msg());
}
// We handled the current input fully, so we should clear it

View file

@ -216,7 +216,7 @@ struct CmdSearch : SourceExprCommand, MixJSON
} catch (AssertionError & e) {
} catch (Error & e) {
if (!toplevel) {
e.addPrefix(fmt("While evaluating the attribute '%s':\n", attrPath));
e.addTrace(std::nullopt, "While evaluating the attribute '%s'", attrPath);
throw;
}
}

View file

@ -70,9 +70,9 @@ struct CmdShowDerivation : InstallablesCommand
for (auto & output : drv.outputs) {
auto outputObj(outputsObj.object(output.first));
outputObj.attr("path", store->printStorePath(output.second.path));
if (output.second.hash != "") {
outputObj.attr("hashAlgo", output.second.hashAlgo);
outputObj.attr("hash", output.second.hash);
if (output.second.hash) {
outputObj.attr("hashAlgo", output.second.hash->printMethodAlgo());
outputObj.attr("hash", output.second.hash->hash.to_string(Base16, false));
}
}
}

View file

@ -87,10 +87,10 @@ struct CmdVerify : StorePathsCommand
if (!noContents) {
std::unique_ptr<AbstractHashSink> hashSink;
if (info->ca == "")
hashSink = std::make_unique<HashSink>(info->narHash.type);
if (!info->ca)
hashSink = std::make_unique<HashSink>(*info->narHash.type);
else
hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
store->narFromPath(info->path, *hashSink);

View file

@ -1,23 +1,39 @@
{ busybox }:
with import ./config.nix;
let
mkDerivation = args:
derivation ({
inherit system;
builder = busybox;
args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")];
} // removeAttrs args ["builder" "meta"])
// { meta = args.meta or {}; };
input1 = mkDerivation {
name = "build-hook-input-1";
buildCommand = "mkdir $out; echo FOO > $out/foo";
shell = busybox;
name = "build-remote-input-1";
buildCommand = "echo FOO > $out";
requiredSystemFeatures = ["foo"];
};
input2 = mkDerivation {
name = "build-hook-input-2";
buildCommand = "mkdir $out; echo BAR > $out/bar";
shell = busybox;
name = "build-remote-input-2";
buildCommand = "echo BAR > $out";
};
in
mkDerivation {
name = "build-hook";
builder = ./dependencies.builder0.sh;
input1 = " " + input1 + "/.";
input2 = " ${input2}/.";
shell = busybox;
name = "build-remote";
buildCommand =
''
read x < ${input1}
read y < ${input2}
echo $x$y > $out
'';
}

View file

@ -3,22 +3,29 @@ source common.sh
clearStore
if ! canUseSandbox; then exit; fi
if [[ ! $SHELL =~ /nix/store ]]; then exit; fi
if ! [[ $busybox =~ busybox ]]; then exit; fi
chmod -R u+w $TEST_ROOT/store0 || true
chmod -R u+w $TEST_ROOT/store1 || true
rm -rf $TEST_ROOT/store0 $TEST_ROOT/store1
chmod -R u+w $TEST_ROOT/machine0 || true
chmod -R u+w $TEST_ROOT/machine1 || true
chmod -R u+w $TEST_ROOT/machine2 || true
rm -rf $TEST_ROOT/machine0 $TEST_ROOT/machine1 $TEST_ROOT/machine2
rm -f $TEST_ROOT/result
nix build -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \
--sandbox-paths /nix/store --sandbox-build-dir /build-tmp \
--builders "$TEST_ROOT/store0; $TEST_ROOT/store1 - - 1 1 foo" \
unset NIX_STORE_DIR
unset NIX_STATE_DIR
# Note: ssh://localhost bypasses ssh, directly invoking nix-store as a
# child process. This allows us to test LegacySSHStore::buildDerivation().
nix build -L -v -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \
--arg busybox $busybox \
--store $TEST_ROOT/machine0 \
--builders "ssh://localhost?remote-store=$TEST_ROOT/machine1; $TEST_ROOT/machine2 - - 1 1 foo" \
--system-features foo
outPath=$TEST_ROOT/result
outPath=$(readlink -f $TEST_ROOT/result)
cat $outPath/foobar | grep FOOBAR
cat $TEST_ROOT/machine0/$outPath | grep FOOBAR
# Ensure that input1 was built on store1 due to the required feature.
p=$(readlink -f $outPath/input-2)
(! nix path-info --store $TEST_ROOT/store0 --all | grep builder-build-hook-input-1.sh)
nix path-info --store $TEST_ROOT/store1 --all | grep builder-build-hook-input-1.sh
# Ensure that input1 was built on store2 due to the required feature.
(! nix path-info --store $TEST_ROOT/machine1 --all | grep builder-build-remote-input-1.sh)
nix path-info --store $TEST_ROOT/machine2 --all | grep builder-build-remote-input-1.sh
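As a rough guide to the machine-spec entries that --builders parses above: each semicolon-separated entry is, in order, store URI, comma-separated systems, SSH identity file, max jobs, speed factor, supported features and mandatory features, with "-" keeping a field's default. A hypothetical invocation (the expression file and builder URIs are placeholders, not taken from this change):

nix build -f my-expr.nix --max-jobs 0 \
  --builders 'ssh://builder1 - - 2 1 - -; /tmp/chroot-store - - 1 1 foo' \
  --system-features foo

A derivation whose requiredSystemFeatures contains "foo" can only be dispatched to the second entry; unconstrained derivations may land on either.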

View file

@ -1,6 +1,6 @@
set -e
export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test)
export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test)/${TEST_NAME:-default}
export NIX_STORE_DIR
if ! NIX_STORE_DIR=$(readlink -f $TEST_ROOT/store 2> /dev/null); then
# Maybe the build directory is symlinked.
@ -11,6 +11,7 @@ export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
export NIX_STATE_DIR=$TEST_ROOT/var/nix
export NIX_CONF_DIR=$TEST_ROOT/etc
export NIX_DAEMON_SOCKET_PATH=$TEST_ROOT/daemon-socket
unset NIX_USER_CONF_FILES
export _NIX_TEST_SHARED=$TEST_ROOT/shared
if [[ -n $NIX_STORE ]]; then
@ -35,6 +36,7 @@ export xmllint="@xmllint@"
export SHELL="@bash@"
export PAGER=cat
export HAVE_SODIUM="@HAVE_SODIUM@"
export busybox="@sandbox_shell@"
export version=@PACKAGE_VERSION@
export system=@system@
@ -75,7 +77,7 @@ startDaemon() {
rm -f $NIX_STATE_DIR/daemon-socket/socket
nix-daemon &
for ((i = 0; i < 30; i++)); do
if [ -e $NIX_STATE_DIR/daemon-socket/socket ]; then break; fi
if [ -e $NIX_DAEMON_SOCKET_PATH ]; then break; fi
sleep 1
done
pidDaemon=$!

View file

@ -13,24 +13,32 @@ fake_free=$TEST_ROOT/fake-free
export _NIX_TEST_FREE_SPACE_FILE=$fake_free
echo 1100 > $fake_free
fifoLock=$TEST_ROOT/fifoLock
mkfifo "$fifoLock"
expr=$(cat <<EOF
with import ./config.nix; mkDerivation {
name = "gc-A";
buildCommand = ''
set -x
[[ \$(ls \$NIX_STORE/*-garbage? | wc -l) = 3 ]]
mkdir \$out
echo foo > \$out/bar
echo 1...
sleep 2
echo 200 > ${fake_free}.tmp1
# Pretend that we run out of space
echo 100 > ${fake_free}.tmp1
mv ${fake_free}.tmp1 $fake_free
echo 2...
sleep 2
echo 3...
sleep 2
echo 4...
[[ \$(ls \$NIX_STORE/*-garbage? | wc -l) = 1 ]]
# Wait for the GC to run
for i in {1..20}; do
echo ''\${i}...
if [[ \$(ls \$NIX_STORE/*-garbage? | wc -l) = 1 ]]; then
exit 0
fi
sleep 1
done
exit 1
'';
}
EOF
@ -43,15 +51,9 @@ with import ./config.nix; mkDerivation {
set -x
mkdir \$out
echo foo > \$out/bar
echo 1...
sleep 2
echo 200 > ${fake_free}.tmp2
mv ${fake_free}.tmp2 $fake_free
echo 2...
sleep 2
echo 3...
sleep 2
echo 4...
# Wait for the first build to finish
cat "$fifoLock"
'';
}
EOF
@ -59,12 +61,19 @@ EOF
nix build -v -o $TEST_ROOT/result-A -L "($expr)" \
--min-free 1000 --max-free 2000 --min-free-check-interval 1 &
pid=$!
pid1=$!
nix build -v -o $TEST_ROOT/result-B -L "($expr2)" \
--min-free 1000 --max-free 2000 --min-free-check-interval 1
--min-free 1000 --max-free 2000 --min-free-check-interval 1 &
pid2=$!
wait "$pid"
# Once the first build is done, unblock the second one.
# If the first build fails, we need to postpone the failure to still allow
# the second one to finish
wait "$pid1" || FIRSTBUILDSTATUS=$?
echo "unlock" > $fifoLock
( exit ${FIRSTBUILDSTATUS:-0} )
wait "$pid2"
[[ foo = $(cat $TEST_ROOT/result-A/bar) ]]
[[ foo = $(cat $TEST_ROOT/result-B/bar) ]]
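The FIFO plus deferred-status dance above generalizes to any pair of test jobs that must run strictly one after the other without sleeps. A minimal sketch, assuming set -e is in effect (as in these tests) and with the job names purely illustrative:

set -e
lock=$TEST_ROOT/handshake.fifo
mkfifo "$lock"
first_job & pid1=$!                # hypothetical long-running job
( cat "$lock"; second_job ) &      # blocks until something is written to the FIFO
pid2=$!
wait "$pid1" || status=$?          # remember a failure instead of aborting right away
echo unlock > "$lock"              # release the second job
( exit "${status:-0}" )            # re-raise the first job's failure under set -e
wait "$pid2"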

View file

@ -1,7 +1,10 @@
echo "Build started" > "$lockFifo"
mkdir $out
echo $(cat $input1/foo)$(cat $input2/bar) > $out/foobar
sleep 10
# Wait for someone to write to the fifo
cat "$lockFifo"
# $out should not have been GC'ed while we were sleeping, but just in
# case...

View file

@ -1,5 +1,7 @@
with import ./config.nix;
{ lockFifo ? null }:
rec {
input1 = mkDerivation {
@ -16,6 +18,7 @@ rec {
name = "gc-concurrent";
builder = ./gc-concurrent.builder.sh;
inherit input1 input2;
inherit lockFifo;
};
test2 = mkDerivation {

View file

@ -2,7 +2,10 @@ source common.sh
clearStore
drvPath1=$(nix-instantiate gc-concurrent.nix -A test1)
lockFifo1=$TEST_ROOT/test1.fifo
mkfifo "$lockFifo1"
drvPath1=$(nix-instantiate gc-concurrent.nix -A test1 --argstr lockFifo "$lockFifo1")
outPath1=$(nix-store -q $drvPath1)
drvPath2=$(nix-instantiate gc-concurrent.nix -A test2)
@ -22,19 +25,16 @@ ln -s $outPath3 "$NIX_STATE_DIR"/gcroots/foo2
nix-store -rvv "$drvPath1" &
pid1=$!
# Start build #2 in the background after 10 seconds.
(sleep 10 && nix-store -rvv "$drvPath2") &
pid2=$!
# Wait for the build of $drvPath1 to start
cat $lockFifo1
# Run the garbage collector while the build is running.
sleep 6
nix-collect-garbage
# Wait for build #1/#2 to finish.
# Unlock the build of $drvPath1
echo "" > $lockFifo1
echo waiting for pid $pid1 to finish...
wait $pid1
echo waiting for pid $pid2 to finish...
wait $pid2
# Check that the root of build #1 and its dependencies haven't been
# deleted. They should not be deleted by the GC because they were
@ -42,8 +42,9 @@ wait $pid2
cat $outPath1/foobar
cat $outPath1/input-2/bar
# Check that build #2 has succeeded. It should succeed because the
# derivation is a GC root.
# Check that the build of $drvPath2 succeeds.
# It should succeed because the derivation is a GC root.
nix-store -rvv "$drvPath2"
cat $outPath2/foobar
rm -f "$NIX_STATE_DIR"/gcroots/foo*

View file

@ -3,5 +3,3 @@ echo $(cat $input1/foo)$(cat $input2/bar)xyzzy > $out/foobar
# Check that the GC hasn't deleted the lock on our output.
test -e "$out.lock"
sleep 6

View file

@ -18,6 +18,7 @@ build-users-group =
keep-derivations = false
sandbox = false
experimental-features = nix-command flakes
gc-reserved-space = 0
include nix.conf.extra
EOF

View file

@ -40,4 +40,4 @@ tests-environment = NIX_REMOTE= $(bash) -e
clean-files += $(d)/common.sh
installcheck: $(d)/common.sh $(d)/config.nix $(d)/plugins/libplugintest.$(SO_EXT)
test-deps += tests/common.sh tests/config.nix tests/plugins/libplugintest.$(SO_EXT)

View file

@ -16,6 +16,11 @@ nix-env --foo 2>&1 | grep "no operation"
nix-env -q --foo 2>&1 | grep "unknown flag"
# Eval Errors.
eval_res=$(nix-instantiate --eval -E 'let a = {} // a; in a.foo' 2>&1 || true)
echo $eval_res | grep "(string) (1:15)"
echo $eval_res | grep "infinite recursion encountered"
eval_arg_res=$(nix-instantiate --eval -E 'let a = {} // a; in a.foo' 2>&1 || true)
echo $eval_arg_res | grep "at: (1:15) from string"
echo $eval_arg_res | grep "infinite recursion encountered"
eval_stdin_res=$(echo 'let a = {} // a; in a.foo' | nix-instantiate --eval -E - 2>&1 || true)
echo $eval_stdin_res | grep "at: (1:15) from stdin"
echo $eval_stdin_res | grep "infinite recursion encountered"

View file

@ -55,3 +55,10 @@ chmod a+rx $TEST_ROOT/shell.shebang.rb
output=$($TEST_ROOT/shell.shebang.rb abc ruby)
[ "$output" = '-e load("'"$TEST_ROOT"'/shell.shebang.rb") -- abc ruby' ]
# Test 'nix develop'.
nix develop -f shell.nix shellDrv -c bash -c '[[ -n $stdenv ]]'
# Test 'nix print-dev-env'.
source <(nix print-dev-env -f shell.nix shellDrv)
[[ -n $stdenv ]]

View file

@ -2,6 +2,8 @@ source common.sh
clearStore
rm -f $TEST_ROOT/result
export REMOTE_STORE=$TEST_ROOT/remote_store
# Build the dependencies and push them to the remote store
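One common way to populate such a local remote store (not necessarily what this test does further down) is nix copy. A minimal sketch with a placeholder expression:

out=$(nix-build my-expr.nix --no-out-link)            # placeholder expression
nix copy --to "file://$REMOTE_STORE" "$out"           # push the closure into the store at $REMOTE_STORE
nix path-info --store "file://$REMOTE_STORE" "$out"   # confirm the path is now present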

View file

@ -5,6 +5,8 @@ if [[ $(uname) != Linux ]]; then exit; fi
clearStore
rm -f $TEST_ROOT/result
export unreachable=$(nix add-to-store ./recursive.sh)
nix --experimental-features 'nix-command recursive-nix' build -o $TEST_ROOT/result -L '(

View file

@ -2,6 +2,8 @@ source common.sh
clearStore
rm -f $TEST_ROOT/result
nix-build structured-attrs.nix -A all -o $TEST_ROOT/result
[[ $(cat $TEST_ROOT/result/foo) = bar ]]