Merge branch 'validPathInfo-temp' into validPathInfo-ca-proper-datatype

John Ericson 2020-06-18 23:01:58 +00:00
commit 3f8dcfe3fd
162 changed files with 3082 additions and 2187 deletions

.gitignore
View file

@ -49,6 +49,9 @@ perl/Makefile.config
# /src/libstore/
*.gen.*
# /src/libutil/
/src/libutil/tests/libutil-tests
/src/nix/nix
# /src/nix-env/

View file

@ -1,7 +1,6 @@
makefiles = \
mk/precompiled-headers.mk \
local.mk \
nix-rust/local.mk \
src/libutil/local.mk \
src/libutil/tests/local.mk \
src/libstore/local.mk \

View file

@ -70,7 +70,7 @@ path just built.</para>
<screen>
$ nix-build ./deterministic.nix -A stable
these derivations will be built:
this derivation will be built:
/nix/store/z98fasz2jqy9gs0xbvdj939p27jwda38-stable.drv
building '/nix/store/z98fasz2jqy9gs0xbvdj939p27jwda38-stable.drv'...
/nix/store/yyxlzw3vqaas7wfp04g0b1xg51f2czgq-stable
@ -85,7 +85,7 @@ checking outputs of '/nix/store/z98fasz2jqy9gs0xbvdj939p27jwda38-stable.drv'...
<screen>
$ nix-build ./deterministic.nix -A unstable
these derivations will be built:
this derivation will be built:
/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv
building '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv'...
/nix/store/krpqk0l9ib0ibi1d2w52z293zw455cap-unstable
@ -193,7 +193,7 @@ repeat = 1
An example output of this configuration:
<screen>
$ nix-build ./test.nix -A unstable
these derivations will be built:
this derivation will be built:
/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv
building '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' (round 1/2)...
building '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' (round 2/2)...

View file

@ -122,7 +122,7 @@ post-build-hook = /etc/nix/upload-to-cache.sh
<screen>
$ nix-build -E '(import &lt;nixpkgs&gt; {}).writeText "example" (builtins.toString builtins.currentTime)'
these derivations will be built:
this derivation will be built:
/nix/store/s4pnfbkalzy5qz57qs6yybna8wylkig6-example.drv
building '/nix/store/s4pnfbkalzy5qz57qs6yybna8wylkig6-example.drv'...
running post-build-hook '/home/grahamc/projects/github.com/NixOS/nix/post-hook.sh'...

View file

@ -516,7 +516,7 @@ source:
$ nix-env -f '&lt;nixpkgs>' -iA hello --dry-run
(dry run; not doing anything)
installing hello-2.10
these paths will be fetched (0.04 MiB download, 0.19 MiB unpacked):
this path will be fetched (0.04 MiB download, 0.19 MiB unpacked):
/nix/store/wkhdf9jinag5750mqlax6z2zbwhqb76n-hello-2.10
<replaceable>...</replaceable></screen>

View file

@ -11,6 +11,10 @@
<arg>
<arg choice='plain'><option>--quiet</option></arg>
</arg>
<arg>
<option>--log-format</option>
<replaceable>format</replaceable>
</arg>
<arg>
<group choice='plain'>
<arg choice='plain'><option>--no-build-output</option></arg>

View file

@ -92,6 +92,37 @@
</varlistentry>
<varlistentry xml:id="opt-log-format"><term><option>--log-format</option> <replaceable>format</replaceable></term>
<listitem>
<para>This option can be used to change the format of the log output, with
<replaceable>format</replaceable> being one of:</para>
<variablelist>
<varlistentry><term>raw</term>
<listitem><para>This is the raw format, as output by nix-build.</para></listitem>
</varlistentry>
<varlistentry><term>internal-json</term>
<listitem><para>Outputs the logs in a structured manner. Note that the JSON schema is not guaranteed to be stable between releases.</para></listitem>
</varlistentry>
<varlistentry><term>bar</term>
<listitem><para>Only display a progress bar during the builds.</para></listitem>
</varlistentry>
<varlistentry><term>bar-with-logs</term>
<listitem><para>Display the raw logs, with the progress bar at the bottom.</para></listitem>
</varlistentry>
</variablelist>
</listitem>
</varlistentry>
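
For orientation, the four formats above typically map onto an internal enum when the option is parsed. The following is a minimal illustrative sketch, not the parser in this tree; the enum and function names (LogFormat, parseLogFormat) are assumptions for the example.

#include <optional>
#include <string>

enum class LogFormat { raw, internalJson, bar, barWithLogs };

// Map a --log-format argument to the corresponding enum value;
// std::nullopt signals an unrecognised format.
std::optional<LogFormat> parseLogFormat(const std::string & s)
{
    if (s == "raw") return LogFormat::raw;
    if (s == "internal-json") return LogFormat::internalJson;
    if (s == "bar") return LogFormat::bar;
    if (s == "bar-with-logs") return LogFormat::barWithLogs;
    return std::nullopt;
}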
<varlistentry><term><option>--no-build-output</option> / <option>-Q</option></term>
<listitem><para>By default, output written by builders to standard

View file

@ -39,7 +39,7 @@ bundle.</para>
<step><para>Set the environment variable and install Nix</para>
<screen>
$ export NIX_SSL_CERT_FILE=/etc/ssl/my-certificate-bundle.crt
$ sh &lt;(curl https://nixos.org/nix/install)
$ sh &lt;(curl -L https://nixos.org/nix/install)
</screen></step>
<step><para>In the shell profile and rc files (for example,

View file

@ -12,7 +12,7 @@
</para>
<screen>
$ sh &lt;(curl https://nixos.org/nix/install)
$ sh &lt;(curl -L https://nixos.org/nix/install)
</screen>
<para>
@ -39,7 +39,7 @@
To explicitly select a single-user installation on your system:
<screen>
sh &lt;(curl https://nixos.org/nix/install) --no-daemon
sh &lt;(curl -L https://nixos.org/nix/install) --no-daemon
</screen>
</para>

View file

@ -15,7 +15,7 @@ to subsequent chapters.</para>
<step><para>Install single-user Nix by running the following:
<screen>
$ bash &lt;(curl https://nixos.org/nix/install)
$ bash &lt;(curl -L https://nixos.org/nix/install)
</screen>
This will install Nix in <filename>/nix</filename>. The install script

View file

@ -142,8 +142,12 @@ $oldName =~ s/"//g;
sub getStorePath {
my ($jobName) = @_;
my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
die unless $buildInfo->{buildproducts}->{1}->{type} eq "nix-build";
return $buildInfo->{buildproducts}->{1}->{path};
for my $product (values %{$buildInfo->{buildproducts}}) {
next unless $product->{type} eq "nix-build";
next if $product->{path} =~ /[a-z]+$/;
return $product->{path};
}
die;
}
write_file("$nixpkgsDir/nixos/modules/installer/tools/nix-fallback-paths.nix",

View file

@ -125,7 +125,8 @@ define build-library
$(1)_PATH := $$(_d)/$$($(1)_NAME).a
$$($(1)_PATH): $$($(1)_OBJS) | $$(_d)/
$(trace-ar) $(AR) crs $$@ $$?
$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$?
$(trace-ar) $(AR) crs $$@ $$(_d)/$$($(1)_NAME).o
$(1)_LDFLAGS_USE += $$($(1)_PATH) $$($(1)_LDFLAGS)

View file

@ -64,13 +64,6 @@ pub extern "C" fn ffi_StorePath_clone(self_: &StorePath) -> StorePath {
self_.clone()
}
#[no_mangle]
pub extern "C" fn ffi_StorePath_clone_to(self_: &StorePath, other: *mut StorePath) {
unsafe {
core::ptr::write(other, self_.clone());
}
}
#[no_mangle]
pub extern "C" fn ffi_StorePath_name(self_: &StorePath) -> &str {
self_.name.name()

View file

@ -80,7 +80,7 @@ SV * queryReferences(char * path)
SV * queryPathHash(char * path)
PPCODE:
try {
auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string();
auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@ -106,7 +106,7 @@ SV * queryPathInfo(char * path, int base32)
XPUSHs(&PL_sv_undef);
else
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
auto s = info->narHash.to_string(base32 ? Base::Base32 : Base::Base16);
auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
mXPUSHi(info->registrationTime);
mXPUSHi(info->narSize);
@ -192,7 +192,7 @@ SV * hashPath(char * algo, int base32, char * path)
PPCODE:
try {
Hash h = hashPath(parseHashType(algo), path).first;
auto s = h.to_string(base32 ? Base::Base32 : Base::Base16, false);
auto s = h.to_string(base32 ? Base32 : Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@ -203,7 +203,7 @@ SV * hashFile(char * algo, int base32, char * path)
PPCODE:
try {
Hash h = hashFile(parseHashType(algo), path);
auto s = h.to_string(base32 ? Base::Base32 : Base::Base16, false);
auto s = h.to_string(base32 ? Base32 : Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@ -214,7 +214,7 @@ SV * hashString(char * algo, int base32, char * s)
PPCODE:
try {
Hash h = hashString(parseHashType(algo), s);
auto s = h.to_string(base32 ? Base::Base32 : Base::Base16, false);
auto s = h.to_string(base32 ? Base32 : Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@ -225,7 +225,7 @@ SV * convertHash(char * algo, char * s, int toBase32)
PPCODE:
try {
Hash h(s, parseHashType(algo));
string s = h.to_string(toBase32 ? Base::Base32 : Base::Base16, false);
string s = h.to_string(toBase32 ? Base32 : Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
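
The two-argument to_string() used in the hunks above takes an encoding (Base16, Base32, ...) plus a flag controlling whether the hash-type prefix (e.g. "sha256:") is included. A brief usage sketch, assuming libutil's Hash API as it appears elsewhere in this diff (hash.hh: Hash, hashString, htSHA256); check against the tree before relying on the exact behaviour.

#include "hash.hh" // libutil

using namespace nix;

void hashToStringExample()
{
    Hash h = hashString(htSHA256, "example");
    auto bare     = h.to_string(Base16, false); // bare hex digest
    auto prefixed = h.to_string(Base32, true);  // digest with type prefix, e.g. "sha256:..."
}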

View file

@ -50,7 +50,6 @@ rec {
libarchive
boost
nlohmann_json
rustc cargo
# Tests
git

View file

@ -12,64 +12,8 @@ let
builtins.readFile ./.version
+ (if officialRelease then "" else "pre${toString nix.revCount}_${nix.shortRev}");
# Create a "vendor" directory that contains the crates listed in
# Cargo.lock. This allows Nix to be built without network access.
vendoredCrates' =
let
lockFile = builtins.fromTOML (builtins.readFile nix-rust/Cargo.lock);
files = map (pkg: import <nix/fetchurl.nix> {
url = "https://crates.io/api/v1/crates/${pkg.name}/${pkg.version}/download";
sha256 = lockFile.metadata."checksum ${pkg.name} ${pkg.version} (registry+https://github.com/rust-lang/crates.io-index)";
}) (builtins.filter (pkg: pkg.source or "" == "registry+https://github.com/rust-lang/crates.io-index") lockFile.package);
in pkgs.runCommand "cargo-vendor-dir" {}
''
mkdir -p $out/vendor
cat > $out/vendor/config <<EOF
[source.crates-io]
replace-with = "vendored-sources"
[source.vendored-sources]
directory = "vendor"
EOF
${toString (builtins.map (file: ''
mkdir $out/vendor/tmp
tar xvf ${file} -C $out/vendor/tmp
dir=$(echo $out/vendor/tmp/*)
# Add just enough metadata to keep Cargo happy.
printf '{"files":{},"package":"${file.outputHash}"}' > "$dir/.cargo-checksum.json"
# Clean up some cruft from the winapi crates. FIXME: find
# a way to remove winapi* from our dependencies.
if [[ $dir =~ /winapi ]]; then
find $dir -name "*.a" -print0 | xargs -0 rm -f --
fi
mv "$dir" $out/vendor/
rm -rf $out/vendor/tmp
'') files)}
'';
jobs = rec {
vendoredCrates =
with pkgs;
runCommand "vendored-crates" {}
''
mkdir -p $out/nix-support
name=nix-vendored-crates-${version}
fn=$out/$name.tar.xz
tar cvfJ $fn -C ${vendoredCrates'} vendor \
--owner=0 --group=0 --mode=u+rw,uga+r \
--transform "s,vendor,$name,"
echo "file crates-tarball $fn" >> $out/nix-support/hydra-build-products
'';
build = pkgs.lib.genAttrs systems (system:
let pkgs = import nixpkgs { inherit system; }; in
@ -101,8 +45,6 @@ let
patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
''}
ln -sfn ${vendoredCrates'}/vendor/ nix-rust/vendor
(cd perl; autoreconf --install --force --verbose)
'';
@ -247,11 +189,6 @@ let
src = nix;
preConfigure =
''
ln -sfn ${vendoredCrates'}/vendor/ nix-rust/vendor
'';
enableParallelBuilding = true;
buildInputs = buildDeps ++ propagatedDeps;

View file

@ -7,7 +7,7 @@ with import ./release-common.nix { inherit pkgs; };
(if useClang then clangStdenv else stdenv).mkDerivation {
name = "nix";
buildInputs = buildDeps ++ propagatedDeps ++ perlDeps ++ [ pkgs.rustfmt ];
buildInputs = buildDeps ++ propagatedDeps ++ perlDeps;
inherit configureFlags;

View file

@ -184,7 +184,7 @@ static int _main(int argc, char * * argv)
try {
Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("connecting to '%s'", bestMachine->storeUri));
Activity act(*logger, lvlTalkative, actUnknown, fmt("connecting to '%s'", bestMachine->storeUri));
Store::Params storeParams;
if (hasPrefix(bestMachine->storeUri, "ssh://")) {
@ -200,9 +200,12 @@ static int _main(int argc, char * * argv)
} catch (std::exception & e) {
auto msg = chomp(drainFD(5, false));
printError("cannot build on '%s': %s%s",
logError({
.name = "Remote build",
.hint = hintfmt("cannot build on '%s': %s%s",
bestMachine->storeUri, e.what(),
(msg.empty() ? "" : ": " + msg));
(msg.empty() ? "" : ": " + msg))
});
bestMachine->enabled = false;
continue;
}
@ -222,7 +225,7 @@ connected:
AutoCloseFD uploadLock = openLockFile(currentLoad + "/" + escapeUri(storeUri) + ".upload-lock", true);
{
Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("waiting for the upload lock to '%s'", storeUri));
Activity act(*logger, lvlTalkative, actUnknown, fmt("waiting for the upload lock to '%s'", storeUri));
auto old = signal(SIGALRM, handleAlarm);
alarm(15 * 60);
@ -235,13 +238,13 @@ connected:
auto substitute = settings.buildersUseSubstitutes ? Substitute : NoSubstitute;
{
Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("copying dependencies to '%s'", storeUri));
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying dependencies to '%s'", storeUri));
copyPaths(store, ref<Store>(sshStore), store->parseStorePathSet(inputs), NoRepair, NoCheckSigs, substitute);
}
uploadLock = -1;
BasicDerivation drv(readDerivation(*store, store->realStoreDir + "/" + std::string(drvPath->to_string())));
auto drv = store->readDerivation(*drvPath);
drv.inputSrcs = store->parseStorePathSet(inputs);
auto result = sshStore->buildDerivation(*drvPath, drv);
@ -254,7 +257,7 @@ connected:
if (!store->isValidPath(store->parseStorePath(path))) missing.insert(store->parseStorePath(path));
if (!missing.empty()) {
Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("copying outputs from '%s'", storeUri));
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying outputs from '%s'", storeUri));
for (auto & i : missing)
store->locksHeld.insert(store->printStorePath(i)); /* FIXME: ugly */
copyPaths(ref<Store>(sshStore), store, missing, NoRepair, NoCheckSigs, NoSubstitute);

View file

@ -51,7 +51,7 @@ using string_to_base_map
= std::unordered_map<std::string, std::shared_ptr<base>>;
#endif
// if defined, `base` will retain type information in form of an enum struct
// if defined, `base` will retain type information in form of an enum class
// such that static_cast can be used instead of dynamic_cast
// #define CPPTOML_NO_RTTI
@ -405,7 +405,7 @@ inline std::shared_ptr<table_array> make_table_array(bool is_inline = false);
#if defined(CPPTOML_NO_RTTI)
/// Base type used to store underlying data type explicitly if RTTI is disabled
enum struct base_type
enum class base_type
{
NONE,
STRING,
@ -2268,7 +2268,7 @@ class parser
return key;
}
enum struct parse_type
enum class parse_type
{
STRING = 1,
LOCAL_TIME,
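
The cpptoml hunks above are a pure spelling normalization: in standard C++, enum struct and enum class declare the same kind of scoped enumeration, as this standalone snippet shows.

// "enum class" and "enum struct" are interchangeable spellings for a
// scoped enumeration; semantics and generated code are identical.
enum class  scoped_a { STRING = 1, LOCAL_TIME };
enum struct scoped_b { STRING = 1, LOCAL_TIME };

static_assert(static_cast<int>(scoped_a::STRING) == static_cast<int>(scoped_b::STRING),
              "both spellings yield the same scoped enumeration");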

View file

@ -1,66 +0,0 @@
#include "error.hh"
#include "nixexpr.hh"
#include <iostream>
#include <optional>
int main()
{
using namespace nix;
// In each program where errors occur, this has to be set.
ErrorInfo::programName = std::optional("error-demo");
// Error in a program; no hint and no nix code.
printErrorInfo(
ErrorInfo { .level = elError,
.name = "name",
.description = "error description",
});
// Warning with name, description, and hint.
// The hintfmt function makes all the substituted text yellow.
printErrorInfo(
ErrorInfo { .level = elWarning,
.name = "name",
.description = "error description",
.hint = std::optional(
hintfmt("there was a %1%", "warning")),
});
// Warning with nix file, line number, column, and the lines of
// code where a warning occurred.
SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix");
printErrorInfo(
ErrorInfo{
.level = elWarning,
.name = "warning name",
.description = "warning description",
.hint = hintfmt("this hint has %1% templated %2%!!", "yellow", "values"),
.nixCode = NixCode {
.errPos = Pos(problem_file, 40, 13),
.prevLineOfCode = std::nullopt,
.errLineOfCode = "this is the problem line of code",
.nextLineOfCode = std::nullopt
}});
// Error with previous and next lines of code.
printErrorInfo(
ErrorInfo{
.level = elError,
.name = "error name",
.description = "error description",
.hint = hintfmt("this hint has %1% templated %2%!!", "yellow", "values"),
.nixCode = NixCode {
.errPos = Pos(problem_file, 40, 13),
.prevLineOfCode = std::optional("previous line of code"),
.errLineOfCode = "this is the problem line of code",
.nextLineOfCode = std::optional("next line of code"),
}});
return 0;
}

View file

@ -1,12 +0,0 @@
programs += error-demo
error-demo_DIR := $(d)
error-demo_SOURCES := \
$(wildcard $(d)/*.cc) \
error-demo_CXXFLAGS += -I src/libutil -I src/libexpr
error-demo_LIBS = libutil libexpr
error-demo_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) -lboost_context -lboost_thread -lboost_system

View file

@ -6,11 +6,11 @@
namespace nix {
static Strings parseAttrPath(const string & s)
static Strings parseAttrPath(std::string_view s)
{
Strings res;
string cur;
string::const_iterator i = s.begin();
auto i = s.begin();
while (i != s.end()) {
if (*i == '.') {
res.push_back(cur);
@ -19,7 +19,7 @@ static Strings parseAttrPath(const string & s)
++i;
while (1) {
if (i == s.end())
throw Error(format("missing closing quote in selection path '%1%'") % s);
throw Error("missing closing quote in selection path '%1%'", s);
if (*i == '"') break;
cur.push_back(*i++);
}
@ -32,6 +32,15 @@ static Strings parseAttrPath(const string & s)
}
std::vector<Symbol> parseAttrPath(EvalState & state, std::string_view s)
{
std::vector<Symbol> res;
for (auto & a : parseAttrPath(s))
res.push_back(state.symbols.create(a));
return res;
}
std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attrPath,
Bindings & autoArgs, Value & vIn)
{
@ -60,11 +69,11 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
if (v->type != tAttrs)
throw TypeError(
format("the expression selected by the selection path '%1%' should be a set but is %2%")
% attrPath % showType(*v));
"the expression selected by the selection path '%1%' should be a set but is %2%",
attrPath,
showType(*v));
if (attr.empty())
throw Error(format("empty attribute name in selection path '%1%'") % attrPath);
throw Error("empty attribute name in selection path '%1%'", attrPath);
Bindings::iterator a = v->attrs->find(state.symbols.create(attr));
if (a == v->attrs->end())
@ -77,9 +86,9 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
if (!v->isList())
throw TypeError(
format("the expression selected by the selection path '%1%' should be a list but is %2%")
% attrPath % showType(*v));
"the expression selected by the selection path '%1%' should be a list but is %2%",
attrPath,
showType(*v));
if (attrIndex >= v->listSize())
throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", attrIndex, attrPath);

View file

@ -16,4 +16,6 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
/* Heuristic to find the filename and lineno of a nix value. */
Pos findDerivationFilename(EvalState & state, Value & v, std::string what);
std::vector<Symbol> parseAttrPath(EvalState & state, std::string_view s);
}

View file

@ -76,7 +76,11 @@ public:
{
auto a = get(name);
if (!a)
throw Error("attribute '%s' missing, at %s", name, pos);
throw Error({
.hint = hintfmt("attribute '%s' missing", name),
.nixCode = NixCode { .errPos = pos }
});
return *a;
}
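
This hunk shows the pattern applied throughout the rest of the diff: positional format(...) % pos error messages are replaced by a structured error object carrying a hint and the error position. A simplified standalone sketch of the shape, using stand-in types rather than Nix's real ErrorInfo/NixCode/hintfmt:

#include <optional>
#include <stdexcept>
#include <string>
#include <utility>

// Stand-ins for Pos / NixCode / ErrorInfo; only the shape matters here.
struct DemoPos { std::string file; unsigned line = 0, column = 0; };
struct DemoNixCode { std::optional<DemoPos> errPos; };
struct DemoErrorInfo {
    std::string hint;
    std::optional<DemoNixCode> nixCode;
};

struct DemoEvalError : std::runtime_error {
    DemoErrorInfo info;
    explicit DemoEvalError(DemoErrorInfo i)
        : std::runtime_error(i.hint), info(std::move(i)) {}
};

void demoThrow()
{
    // Same shape as: throw Error({ .hint = hintfmt(...), .nixCode = NixCode { .errPos = pos } });
    throw DemoEvalError({
        .hint = "attribute 'foo' missing",
        .nixCode = DemoNixCode { .errPos = DemoPos { .file = "example.nix", .line = 3, .column = 7 } },
    });
}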

View file

@ -7,20 +7,26 @@
namespace nix {
LocalNoInlineNoReturn(void throwEvalError(const char * s, const Pos & pos))
LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s))
{
throw EvalError(format(s) % pos);
throw EvalError({
.hint = hintfmt(s),
.nixCode = NixCode { .errPos = pos }
});
}
LocalNoInlineNoReturn(void throwTypeError(const char * s, const Value & v))
{
throw TypeError(format(s) % showType(v));
throw TypeError(s, showType(v));
}
LocalNoInlineNoReturn(void throwTypeError(const char * s, const Value & v, const Pos & pos))
LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const Value & v))
{
throw TypeError(format(s) % showType(v) % pos);
throw TypeError({
.hint = hintfmt(s, showType(v)),
.nixCode = NixCode { .errPos = pos }
});
}
@ -43,7 +49,7 @@ void EvalState::forceValue(Value & v, const Pos & pos)
else if (v.type == tApp)
callFunction(*v.app.left, *v.app.right, v, noPos);
else if (v.type == tBlackhole)
throwEvalError("infinite recursion encountered, at %1%", pos);
throwEvalError(pos, "infinite recursion encountered");
}
@ -59,7 +65,7 @@ inline void EvalState::forceAttrs(Value & v, const Pos & pos)
{
forceValue(v, pos);
if (v.type != tAttrs)
throwTypeError("value is %1% while a set was expected, at %2%", v, pos);
throwTypeError(pos, "value is %1% while a set was expected", v);
}
@ -75,7 +81,7 @@ inline void EvalState::forceList(Value & v, const Pos & pos)
{
forceValue(v, pos);
if (!v.isList())
throwTypeError("value is %1% while a list was expected, at %2%", v, pos);
throwTypeError(pos, "value is %1% while a list was expected", v);
}
/* Note: Various places expect the allocated memory to be zeroed. */

View file

@ -161,12 +161,12 @@ const Value *getPrimOp(const Value &v) {
}
string showType(const Value & v)
string showType(ValueType type)
{
switch (v.type) {
switch (type) {
case tInt: return "an integer";
case tBool: return "a boolean";
case tString: return v.string.context ? "a string with context" : "a string";
case tBool: return "a Boolean";
case tString: return "a string";
case tPath: return "a path";
case tNull: return "null";
case tAttrs: return "a set";
@ -175,14 +175,27 @@ string showType(const Value & v)
case tApp: return "a function application";
case tLambda: return "a function";
case tBlackhole: return "a black hole";
case tPrimOp: return "a built-in function";
case tPrimOpApp: return "a partially applied built-in function";
case tExternal: return "an external value";
case tFloat: return "a float";
}
abort();
}
string showType(const Value & v)
{
switch (v.type) {
case tString: return v.string.context ? "a string with context" : "a string";
case tPrimOp:
return fmt("the built-in function '%s'", string(v.primOp->name));
case tPrimOpApp:
return fmt("the partially applied built-in function '%s'", string(getPrimOp(v)->primOp->name));
case tExternal: return v.external->showType();
case tFloat: return "a float";
default:
return showType(v.type);
}
abort();
}
@ -323,6 +336,7 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
, sOutputHash(symbols.create("outputHash"))
, sOutputHashAlgo(symbols.create("outputHashAlgo"))
, sOutputHashMode(symbols.create("outputHashMode"))
, sRecurseForDerivations(symbols.create("recurseForDerivations"))
, repair(NoRepair)
, store(store)
, baseEnv(allocEnv(128))
@ -471,14 +485,21 @@ Value * EvalState::addConstant(const string & name, Value & v)
Value * EvalState::addPrimOp(const string & name,
size_t arity, PrimOpFun primOp)
{
auto name2 = string(name, 0, 2) == "__" ? string(name, 2) : name;
Symbol sym = symbols.create(name2);
/* Hack to make constants lazy: turn them into an application of
the primop to a dummy value. */
if (arity == 0) {
auto vPrimOp = allocValue();
vPrimOp->type = tPrimOp;
vPrimOp->primOp = new PrimOp(primOp, 1, sym);
Value v;
primOp(*this, noPos, nullptr, v);
mkApp(v, *vPrimOp, *vPrimOp);
return addConstant(name, v);
}
Value * v = allocValue();
string name2 = string(name, 0, 2) == "__" ? string(name, 2) : name;
Symbol sym = symbols.create(name2);
v->type = tPrimOp;
v->primOp = new PrimOp(primOp, arity, sym);
staticBaseEnv.vars[symbols.create(name)] = baseEnvDispl;
@ -501,52 +522,74 @@ Value & EvalState::getBuiltin(const string & name)
LocalNoInlineNoReturn(void throwEvalError(const char * s, const string & s2))
{
throw EvalError(format(s) % s2);
throw EvalError(s, s2);
}
LocalNoInlineNoReturn(void throwEvalError(const char * s, const string & s2, const Pos & pos))
LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const string & s2))
{
throw EvalError(format(s) % s2 % pos);
throw EvalError({
.hint = hintfmt(s, s2),
.nixCode = NixCode { .errPos = pos }
});
}
LocalNoInlineNoReturn(void throwEvalError(const char * s, const string & s2, const string & s3))
{
throw EvalError(format(s) % s2 % s3);
throw EvalError(s, s2, s3);
}
LocalNoInlineNoReturn(void throwEvalError(const char * s, const string & s2, const string & s3, const Pos & pos))
LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const string & s2, const string & s3))
{
throw EvalError(format(s) % s2 % s3 % pos);
throw EvalError({
.hint = hintfmt(s, s2, s3),
.nixCode = NixCode { .errPos = pos }
});
}
LocalNoInlineNoReturn(void throwEvalError(const char * s, const Symbol & sym, const Pos & p1, const Pos & p2))
LocalNoInlineNoReturn(void throwEvalError(const Pos & p1, const char * s, const Symbol & sym, const Pos & p2))
{
throw EvalError(format(s) % sym % p1 % p2);
// p1 is where the error occurred; p2 is a position mentioned in the message.
throw EvalError({
.hint = hintfmt(s, sym, p2),
.nixCode = NixCode { .errPos = p1 }
});
}
LocalNoInlineNoReturn(void throwTypeError(const char * s, const Pos & pos))
LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s))
{
throw TypeError(format(s) % pos);
throw TypeError({
.hint = hintfmt(s),
.nixCode = NixCode { .errPos = pos }
});
}
LocalNoInlineNoReturn(void throwTypeError(const char * s, const string & s1))
{
throw TypeError(format(s) % s1);
throw TypeError(s, s1);
}
LocalNoInlineNoReturn(void throwTypeError(const char * s, const ExprLambda & fun, const Symbol & s2, const Pos & pos))
LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const ExprLambda & fun, const Symbol & s2))
{
throw TypeError(format(s) % fun.showNamePos() % s2 % pos);
throw TypeError({
.hint = hintfmt(s, fun.showNamePos(), s2),
.nixCode = NixCode { .errPos = pos }
});
}
LocalNoInlineNoReturn(void throwAssertionError(const char * s, const string & s1, const Pos & pos))
LocalNoInlineNoReturn(void throwAssertionError(const Pos & pos, const char * s, const string & s1))
{
throw AssertionError(format(s) % s1 % pos);
throw AssertionError({
.hint = hintfmt(s, s1),
.nixCode = NixCode { .errPos = pos }
});
}
LocalNoInlineNoReturn(void throwUndefinedVarError(const char * s, const string & s1, const Pos & pos))
LocalNoInlineNoReturn(void throwUndefinedVarError(const Pos & pos, const char * s, const string & s1))
{
throw UndefinedVarError(format(s) % s1 % pos);
throw UndefinedVarError({
.hint = hintfmt(s, s1),
.nixCode = NixCode { .errPos = pos }
});
}
LocalNoInline(void addErrorPrefix(Error & e, const char * s, const string & s2))
@ -614,7 +657,7 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval)
return j->value;
}
if (!env->prevWith)
throwUndefinedVarError("undefined variable '%1%' at %2%", var.name, var.pos);
throwUndefinedVarError(var.pos, "undefined variable '%1%'", var.name);
for (size_t l = env->prevWith; l; --l, env = env->up) ;
}
}
@ -812,7 +855,7 @@ inline bool EvalState::evalBool(Env & env, Expr * e, const Pos & pos)
Value v;
e->eval(*this, env, v);
if (v.type != tBool)
throwTypeError("value is %1% while a Boolean was expected, at %2%", v, pos);
throwTypeError(pos, "value is %1% while a Boolean was expected", v);
return v.boolean;
}
@ -926,7 +969,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
Symbol nameSym = state.symbols.create(nameVal.string.s);
Bindings::iterator j = v.attrs->find(nameSym);
if (j != v.attrs->end())
throwEvalError("dynamic attribute '%1%' at %2% already defined at %3%", nameSym, i.pos, *j->pos);
throwEvalError(i.pos, "dynamic attribute '%1%' already defined at %2%", nameSym, *j->pos);
i.valueExpr->setName(nameSym);
/* Keep sorted order so find can catch duplicates */
@ -1014,7 +1057,7 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
} else {
state.forceAttrs(*vAttrs, pos);
if ((j = vAttrs->attrs->find(name)) == vAttrs->attrs->end())
throwEvalError("attribute '%1%' missing, at %2%", name, pos);
throwEvalError(pos, "attribute '%1%' missing", name);
}
vAttrs = j->value;
pos2 = j->pos;
@ -1140,7 +1183,7 @@ void EvalState::callFunction(Value & fun, Value & arg, Value & v, const Pos & po
}
if (fun.type != tLambda)
throwTypeError("attempt to call something which is not a function but %1%, at %2%", fun, pos);
throwTypeError(pos, "attempt to call something which is not a function but %1%", fun);
ExprLambda & lambda(*fun.lambda.fun);
@ -1168,8 +1211,8 @@ void EvalState::callFunction(Value & fun, Value & arg, Value & v, const Pos & po
for (auto & i : lambda.formals->formals) {
Bindings::iterator j = arg.attrs->find(i.name);
if (j == arg.attrs->end()) {
if (!i.def) throwTypeError("%1% called without required argument '%2%', at %3%",
lambda, i.name, pos);
if (!i.def) throwTypeError(pos, "%1% called without required argument '%2%'",
lambda, i.name);
env2.values[displ++] = i.def->maybeThunk(*this, env2);
} else {
attrsUsed++;
@ -1184,7 +1227,7 @@ void EvalState::callFunction(Value & fun, Value & arg, Value & v, const Pos & po
user. */
for (auto & i : *arg.attrs)
if (lambda.formals->argNames.find(i.name) == lambda.formals->argNames.end())
throwTypeError("%1% called with unexpected argument '%2%', at %3%", lambda, i.name, pos);
throwTypeError(pos, "%1% called with unexpected argument '%2%'", lambda, i.name);
abort(); // can't happen
}
}
@ -1273,7 +1316,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v)
if (!state.evalBool(env, cond, pos)) {
std::ostringstream out;
cond->show(out);
throwAssertionError("assertion '%1%' failed at %2%", out.str(), pos);
throwAssertionError(pos, "assertion '%1%' failed at %2%", out.str());
}
body->eval(state, env, v);
}
@ -1425,14 +1468,14 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
nf = n;
nf += vTmp.fpoint;
} else
throwEvalError("cannot add %1% to an integer, at %2%", showType(vTmp), pos);
throwEvalError(pos, "cannot add %1% to an integer", showType(vTmp));
} else if (firstType == tFloat) {
if (vTmp.type == tInt) {
nf += vTmp.integer;
} else if (vTmp.type == tFloat) {
nf += vTmp.fpoint;
} else
throwEvalError("cannot add %1% to a float, at %2%", showType(vTmp), pos);
throwEvalError(pos, "cannot add %1% to a float", showType(vTmp));
} else
s << state.coerceToString(pos, vTmp, context, false, firstType == tString);
}
@ -1443,7 +1486,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
mkFloat(v, nf);
else if (firstType == tPath) {
if (!context.empty())
throwEvalError("a string that refers to a store path cannot be appended to a path, at %1%", pos);
throwEvalError(pos, "a string that refers to a store path cannot be appended to a path");
auto path = canonPath(s.str());
mkPath(v, path.c_str());
} else
@ -1492,7 +1535,7 @@ NixInt EvalState::forceInt(Value & v, const Pos & pos)
{
forceValue(v, pos);
if (v.type != tInt)
throwTypeError("value is %1% while an integer was expected, at %2%", v, pos);
throwTypeError(pos, "value is %1% while an integer was expected", v);
return v.integer;
}
@ -1503,7 +1546,7 @@ NixFloat EvalState::forceFloat(Value & v, const Pos & pos)
if (v.type == tInt)
return v.integer;
else if (v.type != tFloat)
throwTypeError("value is %1% while a float was expected, at %2%", v, pos);
throwTypeError(pos, "value is %1% while a float was expected", v);
return v.fpoint;
}
@ -1512,7 +1555,7 @@ bool EvalState::forceBool(Value & v, const Pos & pos)
{
forceValue(v, pos);
if (v.type != tBool)
throwTypeError("value is %1% while a Boolean was expected, at %2%", v, pos);
throwTypeError(pos, "value is %1% while a Boolean was expected", v);
return v.boolean;
}
@ -1527,7 +1570,7 @@ void EvalState::forceFunction(Value & v, const Pos & pos)
{
forceValue(v, pos);
if (v.type != tLambda && v.type != tPrimOp && v.type != tPrimOpApp && !isFunctor(v))
throwTypeError("value is %1% while a function was expected, at %2%", v, pos);
throwTypeError(pos, "value is %1% while a function was expected", v);
}
@ -1536,7 +1579,7 @@ string EvalState::forceString(Value & v, const Pos & pos)
forceValue(v, pos);
if (v.type != tString) {
if (pos)
throwTypeError("value is %1% while a string was expected, at %2%", v, pos);
throwTypeError(pos, "value is %1% while a string was expected", v);
else
throwTypeError("value is %1% while a string was expected", v);
}
@ -1565,8 +1608,8 @@ string EvalState::forceStringNoCtx(Value & v, const Pos & pos)
string s = forceString(v, pos);
if (v.string.context) {
if (pos)
throwEvalError("the string '%1%' is not allowed to refer to a store path (such as '%2%'), at %3%",
v.string.s, v.string.context[0], pos);
throwEvalError(pos, "the string '%1%' is not allowed to refer to a store path (such as '%2%')",
v.string.s, v.string.context[0]);
else
throwEvalError("the string '%1%' is not allowed to refer to a store path (such as '%2%')",
v.string.s, v.string.context[0]);
@ -1622,7 +1665,7 @@ string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
return *maybeString;
}
auto i = v.attrs->find(sOutPath);
if (i == v.attrs->end()) throwTypeError("cannot coerce a set to a string, at %1%", pos);
if (i == v.attrs->end()) throwTypeError(pos, "cannot coerce a set to a string");
return coerceToString(pos, *i->value, context, coerceMore, copyToStore);
}
@ -1653,7 +1696,7 @@ string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
}
}
throwTypeError("cannot coerce %1% to a string, at %2%", v, pos);
throwTypeError(pos, "cannot coerce %1% to a string", v);
}
@ -1669,10 +1712,10 @@ string EvalState::copyPathToStore(PathSet & context, const Path & path)
else {
auto p = settings.readOnlyMode
? store->computeStorePathForPath(std::string(baseNameOf(path)), checkSourcePath(path)).first
: store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, HashType::SHA256, defaultPathFilter, repair);
: store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, repair);
dstPath = store->printStorePath(p);
srcToStore.insert_or_assign(path, std::move(p));
printMsg(Verbosity::Chatty, "copied source '%1%' -> '%2%'", path, dstPath);
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, dstPath);
}
context.insert(dstPath);
@ -1684,7 +1727,7 @@ Path EvalState::coerceToPath(const Pos & pos, Value & v, PathSet & context)
{
string path = coerceToString(pos, v, context, false, false);
if (path == "" || path[0] != '/')
throwEvalError("string '%1%' doesn't represent an absolute path, at %2%", path, pos);
throwEvalError(pos, "string '%1%' doesn't represent an absolute path", path);
return path;
}
@ -1891,8 +1934,10 @@ void EvalState::printStats()
string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const
{
throw TypeError(format("cannot coerce %1% to a string, at %2%") %
showType() % pos);
throw TypeError({
.hint = hintfmt("cannot coerce %1% to a string", showType()),
.nixCode = NixCode { .errPos = pos }
});
}
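
A condensed standalone illustration of the showType() split above: one overload describes a bare type, the other refines the description when a value is at hand and defers to the type-only overload otherwise. Types here are simplified stand-ins, not Nix's Value.

#include <string>

enum class DemoType { Int, Bool, String, Attrs };

// Overload 1: description derived from the type alone.
std::string demoShowType(DemoType t)
{
    switch (t) {
        case DemoType::Int:    return "an integer";
        case DemoType::Bool:   return "a Boolean";
        case DemoType::String: return "a string";
        case DemoType::Attrs:  return "a set";
    }
    return "";
}

struct DemoValue { DemoType type; bool stringHasContext = false; };

// Overload 2: use value details where they add precision, otherwise
// fall back to the type-only overload (the default: branch above).
std::string demoShowType(const DemoValue & v)
{
    if (v.type == DemoType::String && v.stringHasContext)
        return "a string with context";
    return demoShowType(v.type);
}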

View file

@ -18,7 +18,7 @@ namespace nix {
class Store;
class EvalState;
struct StorePath;
class StorePath;
enum RepairFlag : bool;
@ -74,7 +74,8 @@ public:
sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls,
sFile, sLine, sColumn, sFunctor, sToString,
sRight, sWrong, sStructuredAttrs, sBuilder, sArgs,
sOutputHash, sOutputHashAlgo, sOutputHashMode;
sOutputHash, sOutputHashAlgo, sOutputHashMode,
sRecurseForDerivations;
Symbol sDerivationNix;
/* If set, force copying files to the Nix store even if they
@ -324,6 +325,7 @@ private:
/* Return a string representing the type of the value `v'. */
string showType(ValueType type);
string showType(const Value & v);
/* Decode a context string !<name>!<path> into a pair <path,

View file

@ -6,13 +6,13 @@ namespace nix {
FunctionCallTrace::FunctionCallTrace(const Pos & pos) : pos(pos) {
auto duration = std::chrono::high_resolution_clock::now().time_since_epoch();
auto ns = std::chrono::duration_cast<std::chrono::nanoseconds>(duration);
printMsg(Verbosity::Info, "function-trace entered %1% at %2%", pos, ns.count());
printMsg(lvlInfo, "function-trace entered %1% at %2%", pos, ns.count());
}
FunctionCallTrace::~FunctionCallTrace() {
auto duration = std::chrono::high_resolution_clock::now().time_since_epoch();
auto ns = std::chrono::duration_cast<std::chrono::nanoseconds>(duration);
printMsg(Verbosity::Info, "function-trace exited %1% at %2%", pos, ns.count());
printMsg(lvlInfo, "function-trace exited %1% at %2%", pos, ns.count());
}
}
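
FunctionCallTrace is an RAII tracer: the constructor and destructor each log one timestamped line, so an entered/exited pair brackets the evaluation of the traced call. A standalone sketch of the same idea with stand-in logging (not the class above):

#include <chrono>
#include <cstdio>
#include <string>
#include <utility>

struct DemoCallTrace {
    std::string where;

    explicit DemoCallTrace(std::string w) : where(std::move(w))
    {
        std::printf("function-trace entered %s at %lld\n", where.c_str(), nowNs());
    }

    ~DemoCallTrace()
    {
        std::printf("function-trace exited %s at %lld\n", where.c_str(), nowNs());
    }

    // Nanoseconds since the clock's epoch, matching the log lines above.
    static long long nowNs()
    {
        auto d = std::chrono::high_resolution_clock::now().time_since_epoch();
        return (long long) std::chrono::duration_cast<std::chrono::nanoseconds>(d).count();
    }
};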

View file

@ -348,7 +348,7 @@ static void getDerivations(EvalState & state, Value & vIn,
should we recurse into it? => Only if it has a
`recurseForDerivations = true' attribute. */
if (i->value->type == tAttrs) {
Bindings::iterator j = i->value->attrs->find(state.symbols.create("recurseForDerivations"));
Bindings::iterator j = i->value->attrs->find(state.sRecurseForDerivations);
if (j != i->value->attrs->end() && state.forceBool(*j->value, *j->pos))
getDerivations(state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
}
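
The lookup above now reuses state.sRecurseForDerivations, interned once in the EvalState constructor (see the eval.hh/eval.cc hunks earlier), instead of re-creating the symbol on every call. A minimal standalone sketch of that intern-and-cache idea; this is a simplification, not Nix's SymbolTable.

#include <string>
#include <unordered_set>

// Interning returns a stable pointer into the table, so repeated lookups of
// the same attribute name reduce to cheap pointer comparisons.
struct DemoSymbolTable {
    std::unordered_set<std::string> strings;

    const std::string * create(const std::string & s)
    {
        return &*strings.insert(s).first;
    }
};

struct DemoEvalState {
    DemoSymbolTable symbols;
    // Cached once at construction, mirroring sRecurseForDerivations.
    const std::string * sRecurseForDerivations = symbols.create("recurseForDerivations");
};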

View file

@ -127,14 +127,14 @@ or { return OR_KW; }
try {
yylval->n = boost::lexical_cast<int64_t>(yytext);
} catch (const boost::bad_lexical_cast &) {
throw ParseError(format("invalid integer '%1%'") % yytext);
throw ParseError("invalid integer '%1%'", yytext);
}
return INT;
}
{FLOAT} { errno = 0;
yylval->nf = strtod(yytext, 0);
if (errno != 0)
throw ParseError(format("invalid float '%1%'") % yytext);
throw ParseError("invalid float '%1%'", yytext);
return FLOAT;
}
@ -219,4 +219,3 @@ or { return OR_KW; }
}
%%

View file

@ -8,7 +8,7 @@ libexpr_SOURCES := $(wildcard $(d)/*.cc) $(wildcard $(d)/primops/*.cc) $(d)/lexe
libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libmain -I src/libexpr
libexpr_LIBS = libutil libstore libfetchers libnixrust
libexpr_LIBS = libutil libstore libfetchers
libexpr_LDFLAGS =
ifneq ($(OS), FreeBSD)

View file

@ -267,8 +267,11 @@ void ExprVar::bindVars(const StaticEnv & env)
/* Otherwise, the variable must be obtained from the nearest
enclosing `with'. If there is no `with', then we can issue an
"undefined variable" error now. */
if (withLevel == -1) throw UndefinedVarError(format("undefined variable '%1%' at %2%") % name % pos);
if (withLevel == -1)
throw UndefinedVarError({
.hint = hintfmt("undefined variable '%1%'", name),
.nixCode = NixCode { .errPos = pos }
});
fromWith = true;
this->level = withLevel;
}

View file

@ -2,6 +2,7 @@
#include "value.hh"
#include "symbol-table.hh"
#include "error.hh"
#include <map>
@ -235,8 +236,10 @@ struct ExprLambda : Expr
: pos(pos), arg(arg), matchAttrs(matchAttrs), formals(formals), body(body)
{
if (!arg.empty() && formals && formals->argNames.find(arg) != formals->argNames.end())
throw ParseError(format("duplicate formal function argument '%1%' at %2%")
% arg % pos);
throw ParseError({
.hint = hintfmt("duplicate formal function argument '%1%'", arg),
.nixCode = NixCode { .errPos = pos }
});
};
void setName(Symbol & name);
string showNamePos() const;

View file

@ -31,7 +31,7 @@ namespace nix {
Expr * result;
Path basePath;
Symbol path;
string error;
ErrorInfo error;
Symbol sLetBody;
ParseData(EvalState & state)
: state(state)
@ -64,15 +64,20 @@ namespace nix {
static void dupAttr(const AttrPath & attrPath, const Pos & pos, const Pos & prevPos)
{
throw ParseError(format("attribute '%1%' at %2% already defined at %3%")
% showAttrPath(attrPath) % pos % prevPos);
throw ParseError({
.hint = hintfmt("attribute '%1%' already defined at %2%",
showAttrPath(attrPath), prevPos),
.nixCode = NixCode { .errPos = pos },
});
}
static void dupAttr(Symbol attr, const Pos & pos, const Pos & prevPos)
{
throw ParseError(format("attribute '%1%' at %2% already defined at %3%")
% attr % pos % prevPos);
throw ParseError({
.hint = hintfmt("attribute '%1%' already defined at %2%", attr, prevPos),
.nixCode = NixCode { .errPos = pos },
});
}
@ -140,8 +145,11 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath,
static void addFormal(const Pos & pos, Formals * formals, const Formal & formal)
{
if (!formals->argNames.insert(formal.name).second)
throw ParseError(format("duplicate formal function argument '%1%' at %2%")
% formal.name % pos);
throw ParseError({
.hint = hintfmt("duplicate formal function argument '%1%'",
formal.name),
.nixCode = NixCode { .errPos = pos },
});
formals->formals.push_front(formal);
}
@ -249,8 +257,10 @@ static inline Pos makeCurPos(const YYLTYPE & loc, ParseData * data)
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * error)
{
data->error = (format("%1%, at %2%")
% error % makeCurPos(*loc, data)).str();
data->error = {
.hint = hintfmt(error),
.nixCode = NixCode { .errPos = makeCurPos(*loc, data) }
};
}
@ -327,8 +337,10 @@ expr_function
{ $$ = new ExprWith(CUR_POS, $2, $4); }
| LET binds IN expr_function
{ if (!$2->dynamicAttrs.empty())
throw ParseError(format("dynamic attributes not allowed in let at %1%")
% CUR_POS);
throw ParseError({
.hint = hintfmt("dynamic attributes not allowed in let"),
.nixCode = NixCode { .errPos = CUR_POS },
});
$$ = new ExprLet($2, $4);
}
| expr_if
@ -405,7 +417,10 @@ expr_simple
| URI {
static bool noURLLiterals = settings.isExperimentalFeatureEnabled("no-url-literals");
if (noURLLiterals)
throw ParseError("URL literals are disabled, at %s", CUR_POS);
throw ParseError({
.hint = hintfmt("URL literals are disabled"),
.nixCode = NixCode { .errPos = CUR_POS }
});
$$ = new ExprString(data->symbols.create($1));
}
| '(' expr ')' { $$ = $2; }
@ -475,8 +490,10 @@ attrs
$$->push_back(AttrName(str->s));
delete str;
} else
throw ParseError(format("dynamic attributes not allowed in inherit at %1%")
% makeCurPos(@2, data));
throw ParseError({
.hint = hintfmt("dynamic attributes not allowed in inherit"),
.nixCode = NixCode { .errPos = makeCurPos(@2, data) },
});
}
| { $$ = new AttrPath; }
;
@ -626,7 +643,7 @@ Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath)
Expr * EvalState::parseStdin()
{
//Activity act(*logger, Verbosity::Talkative, format("parsing standard input"));
//Activity act(*logger, lvlTalkative, format("parsing standard input"));
return parseExprFromString(drainFD(0), absPath("."));
}
@ -671,11 +688,13 @@ Path EvalState::findFile(SearchPath & searchPath, const string & path, const Pos
Path res = r.second + suffix;
if (pathExists(res)) return canonPath(res);
}
format f = format(
"file '%1%' was not found in the Nix search path (add it using $NIX_PATH or -I)"
+ string(pos ? ", at %2%" : ""));
f.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit);
throw ThrownError(f % path % pos);
throw ThrownError({
.hint = hintfmt(evalSettings.pureEval
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
path),
.nixCode = NixCode { .errPos = pos }
});
}
@ -691,7 +710,10 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
res = { true, store->toRealPath(fetchers::downloadTarball(
store, resolveUri(elem.second), "source", false).storePath) };
} catch (FileTransferError & e) {
printError(format("warning: Nix search path entry '%1%' cannot be downloaded, ignoring") % elem.second);
logWarning({
.name = "Entry download",
.hint = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
});
res = { false, "" };
}
} else {
@ -699,7 +721,10 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
if (pathExists(path))
res = { true, path };
else {
printError(format("warning: Nix search path entry '%1%' does not exist, ignoring") % elem.second);
logWarning({
.name = "Entry not found",
.hint = hintfmt("warning: Nix search path entry '%1%' does not exist, ignoring", elem.second)
});
res = { false, "" };
}
}

View file

@ -50,20 +50,20 @@ void EvalState::realiseContext(const PathSet & context)
std::vector<StorePathWithOutputs> drvs;
for (auto & i : context) {
std::pair<string, string> decoded = decodeContext(i);
auto ctx = store->parseStorePath(decoded.first);
auto [ctxS, outputName] = decodeContext(i);
auto ctx = store->parseStorePath(ctxS);
if (!store->isValidPath(ctx))
throw InvalidPathError(store->printStorePath(ctx));
if (!decoded.second.empty() && ctx.isDerivation()) {
drvs.push_back(StorePathWithOutputs{ctx.clone(), {decoded.second}});
if (!outputName.empty() && ctx.isDerivation()) {
drvs.push_back(StorePathWithOutputs{ctx, {outputName}});
/* Add the output of this derivation to the allowed
paths. */
if (allowedPaths) {
auto drv = store->derivationFromPath(store->parseStorePath(decoded.first));
DerivationOutputs::iterator i = drv.outputs.find(decoded.second);
auto drv = store->derivationFromPath(ctx);
DerivationOutputs::iterator i = drv.outputs.find(outputName);
if (i == drv.outputs.end())
throw Error("derivation '%s' does not have an output named '%s'", decoded.first, decoded.second);
throw Error("derivation '%s' does not have an output named '%s'", ctxS, outputName);
allowedPaths->insert(store->printStorePath(i->second.path));
}
}
@ -79,6 +79,7 @@ void EvalState::realiseContext(const PathSet & context)
StorePathSet willBuild, willSubstitute, unknown;
unsigned long long downloadSize, narSize;
store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize);
store->buildPaths(drvs);
}
@ -93,8 +94,10 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError(format("cannot import '%1%', since path '%2%' is not valid, at %3%")
% path % e.path % pos);
throw EvalError({
.hint = hintfmt("cannot import '%1%', since path '%2%' is not valid", path, e.path),
.nixCode = NixCode { .errPos = pos }
});
}
Path realPath = state.checkSourcePath(state.toRealPath(path, context));
@ -170,8 +173,12 @@ void prim_importNative(EvalState & state, const Pos & pos, Value * * args, Value
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError(format("cannot import '%1%', since path '%2%' is not valid, at %3%")
% path % e.path % pos);
throw EvalError({
.hint = hintfmt(
"cannot import '%1%', since path '%2%' is not valid",
path, e.path),
.nixCode = NixCode { .errPos = pos }
});
}
path = state.checkSourcePath(path);
@ -180,17 +187,17 @@ void prim_importNative(EvalState & state, const Pos & pos, Value * * args, Value
void *handle = dlopen(path.c_str(), RTLD_LAZY | RTLD_LOCAL);
if (!handle)
throw EvalError(format("could not open '%1%': %2%") % path % dlerror());
throw EvalError("could not open '%1%': %2%", path, dlerror());
dlerror();
ValueInitializer func = (ValueInitializer) dlsym(handle, sym.c_str());
if(!func) {
char *message = dlerror();
if (message)
throw EvalError(format("could not load symbol '%1%' from '%2%': %3%") % sym % path % message);
throw EvalError("could not load symbol '%1%' from '%2%': %3%", sym, path, message);
else
throw EvalError(format("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected")
% sym % path);
throw EvalError("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected",
sym, path);
}
(func)(state, v);
@ -206,7 +213,10 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
auto elems = args[0]->listElems();
auto count = args[0]->listSize();
if (count == 0) {
throw EvalError(format("at least one argument to 'exec' required, at %1%") % pos);
throw EvalError({
.hint = hintfmt("at least one argument to 'exec' required"),
.nixCode = NixCode { .errPos = pos }
});
}
PathSet context;
auto program = state.coerceToString(pos, *elems[0], context, false, false);
@ -217,8 +227,11 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError(format("cannot execute '%1%', since path '%2%' is not valid, at %3%")
% program % e.path % pos);
throw EvalError({
.hint = hintfmt("cannot execute '%1%', since path '%2%' is not valid",
program, e.path),
.nixCode = NixCode { .errPos = pos }
});
}
auto output = runProgram(program, true, commandArgs);
@ -226,13 +239,13 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
try {
parsed = state.parseExprFromString(output, pos.file);
} catch (Error & e) {
e.addPrefix(format("While parsing the output from '%1%', at %2%\n") % program % pos);
e.addPrefix(fmt("While parsing the output from '%1%', at %2%\n", program, pos));
throw;
}
try {
state.eval(parsed, v);
} catch (Error & e) {
e.addPrefix(format("While evaluating the output from '%1%', at %2%\n") % program % pos);
e.addPrefix(fmt("While evaluating the output from '%1%', at %2%\n", program, pos));
throw;
}
}
@ -338,7 +351,7 @@ struct CompareValues
if (v1->type == tInt && v2->type == tFloat)
return v1->integer < v2->fpoint;
if (v1->type != v2->type)
throw EvalError(format("cannot compare %1% with %2%") % showType(*v1) % showType(*v2));
throw EvalError("cannot compare %1% with %2%", showType(*v1), showType(*v2));
switch (v1->type) {
case tInt:
return v1->integer < v2->integer;
@ -349,7 +362,7 @@ struct CompareValues
case tPath:
return strcmp(v1->path, v2->path) < 0;
default:
throw EvalError(format("cannot compare %1% with %2%") % showType(*v1) % showType(*v2));
throw EvalError("cannot compare %1% with %2%", showType(*v1), showType(*v2));
}
}
};
@ -370,7 +383,10 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
Bindings::iterator startSet =
args[0]->attrs->find(state.symbols.create("startSet"));
if (startSet == args[0]->attrs->end())
throw EvalError(format("attribute 'startSet' required, at %1%") % pos);
throw EvalError({
.hint = hintfmt("attribute 'startSet' required"),
.nixCode = NixCode { .errPos = pos }
});
state.forceList(*startSet->value, pos);
ValueList workSet;
@ -381,7 +397,10 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
Bindings::iterator op =
args[0]->attrs->find(state.symbols.create("operator"));
if (op == args[0]->attrs->end())
throw EvalError(format("attribute 'operator' required, at %1%") % pos);
throw EvalError({
.hint = hintfmt("attribute 'operator' required"),
.nixCode = NixCode { .errPos = pos }
});
state.forceValue(*op->value, pos);
/* Construct the closure by applying the operator to element of
@ -400,7 +419,10 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
Bindings::iterator key =
e->attrs->find(state.symbols.create("key"));
if (key == e->attrs->end())
throw EvalError(format("attribute 'key' required, at %1%") % pos);
throw EvalError({
.hint = hintfmt("attribute 'key' required"),
.nixCode = NixCode { .errPos = pos }
});
state.forceValue(*key->value, pos);
if (!doneKeys.insert(key->value).second) continue;
@ -430,7 +452,7 @@ static void prim_abort(EvalState & state, const Pos & pos, Value * * args, Value
{
PathSet context;
string s = state.coerceToString(pos, *args[0], context);
throw Abort(format("evaluation aborted with the following error message: '%1%'") % s);
throw Abort("evaluation aborted with the following error message: '%1%'", s);
}
@ -505,9 +527,9 @@ static void prim_trace(EvalState & state, const Pos & pos, Value * * args, Value
{
state.forceValue(*args[0], pos);
if (args[0]->type == tString)
printError(format("trace: %1%") % args[0]->string.s);
printError("trace: %1%", args[0]->string.s);
else
printError(format("trace: %1%") % *args[0]);
printError("trace: %1%", *args[0]);
state.forceValue(*args[1], pos);
v = *args[1];
}
@ -532,13 +554,16 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
/* Figure out the name first (for stack backtraces). */
Bindings::iterator attr = args[0]->attrs->find(state.sName);
if (attr == args[0]->attrs->end())
throw EvalError(format("required attribute 'name' missing, at %1%") % pos);
throw EvalError({
.hint = hintfmt("required attribute 'name' missing"),
.nixCode = NixCode { .errPos = pos }
});
string drvName;
Pos & posDrvName(*attr->pos);
try {
drvName = state.forceStringNoCtx(*attr->value, pos);
} catch (Error & e) {
e.addPrefix(format("while evaluating the derivation attribute 'name' at %1%:\n") % posDrvName);
e.addPrefix(fmt("while evaluating the derivation attribute 'name' at %1%:\n", posDrvName));
throw;
}
@ -575,25 +600,38 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
auto handleHashMode = [&](const std::string & s) {
if (s == "recursive") ingestionMethod = FileIngestionMethod::Recursive;
else if (s == "flat") ingestionMethod = FileIngestionMethod::Flat;
else throw EvalError("invalid value '%s' for 'outputHashMode' attribute, at %s", s, posDrvName);
else
throw EvalError({
.hint = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
.nixCode = NixCode { .errPos = posDrvName }
});
};
auto handleOutputs = [&](const Strings & ss) {
outputs.clear();
for (auto & j : ss) {
if (outputs.find(j) != outputs.end())
throw EvalError(format("duplicate derivation output '%1%', at %2%") % j % posDrvName);
throw EvalError({
.hint = hintfmt("duplicate derivation output '%1%'", j),
.nixCode = NixCode { .errPos = posDrvName }
});
/* !!! Check whether j is a valid attribute
name. */
/* Derivations cannot be named drv, because
then we'd have an attribute drvPath in
the resulting set. */
if (j == "drv")
throw EvalError(format("invalid derivation output name 'drv', at %1%") % posDrvName);
throw EvalError({
.hint = hintfmt("invalid derivation output name 'drv'" ),
.nixCode = NixCode { .errPos = posDrvName }
});
outputs.insert(j);
}
if (outputs.empty())
throw EvalError(format("derivation cannot have an empty set of outputs, at %1%") % posDrvName);
throw EvalError({
.hint = hintfmt("derivation cannot have an empty set of outputs"),
.nixCode = NixCode { .errPos = posDrvName }
});
};
try {
@ -686,9 +724,9 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
StorePathSet refs;
state.store->computeFSClosure(state.store->parseStorePath(std::string_view(path).substr(1)), refs);
for (auto & j : refs) {
drv.inputSrcs.insert(j.clone());
drv.inputSrcs.insert(j);
if (j.isDerivation())
drv.inputDrvs[j.clone()] = state.store->queryDerivationOutputNames(j);
drv.inputDrvs[j] = state.store->readDerivation(j).outputNames();
}
}
@ -705,27 +743,41 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
/* Do we have all required attributes? */
if (drv.builder == "")
throw EvalError(format("required attribute 'builder' missing, at %1%") % posDrvName);
throw EvalError({
.hint = hintfmt("required attribute 'builder' missing"),
.nixCode = NixCode { .errPos = posDrvName }
});
if (drv.platform == "")
throw EvalError(format("required attribute 'system' missing, at %1%") % posDrvName);
throw EvalError({
.hint = hintfmt("required attribute 'system' missing"),
.nixCode = NixCode { .errPos = posDrvName }
});
/* Check whether the derivation name is valid. */
if (isDerivation(drvName))
throw EvalError("derivation names are not allowed to end in '%s', at %s", drvExtension, posDrvName);
throw EvalError({
.hint = hintfmt("derivation names are not allowed to end in '%s'", drvExtension),
.nixCode = NixCode { .errPos = posDrvName }
});
if (outputHash) {
/* Handle fixed-output derivations. */
if (outputs.size() != 1 || *(outputs.begin()) != "out")
throw Error(format("multiple outputs are not supported in fixed-output derivations, at %1%") % posDrvName);
throw Error({
.hint = hintfmt("multiple outputs are not supported in fixed-output derivations"),
.nixCode = NixCode { .errPos = posDrvName }
});
std::optional<HashType> ht = parseHashTypeOpt(outputHashAlgo);
Hash h(*outputHash, ht);
Hash h = newHashAllowEmpty(*outputHash, ht);
auto outPath = state.store->makeFixedOutputPath(ingestionMethod, h, drvName);
if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath);
drv.outputs.insert_or_assign("out", DerivationOutput(
std::move(outPath),
FileSystemHash(ingestionMethod, std::move(h))));
drv.outputs.insert_or_assign("out", DerivationOutput {
.path = std::move(outPath),
.hash = FileSystemHash { ingestionMethod, std::move(h) },
});
}
else {
@ -738,7 +790,10 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
for (auto & i : outputs) {
if (!jsonObject) drv.env[i] = "";
drv.outputs.insert_or_assign(i,
DerivationOutput(StorePath::dummy.clone(), std::optional<FileSystemHash>()));
DerivationOutput {
.path = StorePath::dummy,
.hash = std::optional<FileSystemHash> {},
});
}
Hash h = hashDerivationModulo(*state.store, Derivation(drv), true);
@ -747,7 +802,10 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
auto outPath = state.store->makeOutputPath(i, h, drvName);
if (!jsonObject) drv.env[i] = state.store->printStorePath(outPath);
drv.outputs.insert_or_assign(i,
DerivationOutput(std::move(outPath), std::optional<FileSystemHash>()));
DerivationOutput {
.path = std::move(outPath),
.hash = std::optional<FileSystemHash>(),
});
}
}
@ -755,12 +813,12 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
auto drvPath = writeDerivation(state.store, drv, drvName, state.repair);
auto drvPathS = state.store->printStorePath(drvPath);
printMsg(Verbosity::Chatty, "instantiated '%1%' -> '%2%'", drvName, drvPathS);
printMsg(lvlChatty, "instantiated '%1%' -> '%2%'", drvName, drvPathS);
/* Optimisation, but required in read-only mode! because in that
case we don't actually write store derivations, so we can't
read them later. */
drvHashes.insert_or_assign(drvPath.clone(),
drvHashes.insert_or_assign(drvPath,
hashDerivationModulo(*state.store, Derivation(drv), false));
state.mkAttrs(v, 1 + drv.outputs.size());
@ -817,7 +875,10 @@ static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, V
e.g. nix-push does the right thing. */
if (!state.store->isStorePath(path)) path = canonPath(path, true);
if (!state.store->isInStore(path))
throw EvalError(format("path '%1%' is not in the Nix store, at %2%") % path % pos);
throw EvalError({
.hint = hintfmt("path '%1%' is not in the Nix store", path),
.nixCode = NixCode { .errPos = pos }
});
Path path2 = state.store->toStorePath(path);
if (!settings.readOnlyMode)
state.store->ensurePath(state.store->parseStorePath(path2));
@ -833,9 +894,12 @@ static void prim_pathExists(EvalState & state, const Pos & pos, Value * * args,
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError(format(
"cannot check the existence of '%1%', since path '%2%' is not valid, at %3%")
% path % e.path % pos);
throw EvalError({
.hint = hintfmt(
"cannot check the existence of '%1%', since path '%2%' is not valid",
path, e.path),
.nixCode = NixCode { .errPos = pos }
});
}
try {
@ -878,12 +942,14 @@ static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Va
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError(format("cannot read '%1%', since path '%2%' is not valid, at %3%")
% path % e.path % pos);
throw EvalError({
.hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
.nixCode = NixCode { .errPos = pos }
});
}
string s = readFile(state.checkSourcePath(state.toRealPath(path, context)));
if (s.find((char) 0) != string::npos)
throw Error(format("the contents of the file '%1%' cannot be represented as a Nix string") % path);
throw Error("the contents of the file '%1%' cannot be represented as a Nix string", path);
mkString(v, s.c_str());
}
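
The hunks above replace the old format("... at %1%") % pos error strings with a brace-initialized error that carries a hintfmt() message plus a NixCode holding the offending position. Below is a minimal sketch of the pattern from a primop author's point of view; the primop itself is hypothetical, only the error API is the one used in the code above:

// Hypothetical primop, shown only to illustrate the structured-error style:
// the source position travels in NixCode::errPos instead of being appended
// to the message text.
static void prim_example(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
    state.forceValue(*args[0], pos);
    if (args[0]->type != tString)
        throw EvalError({
            .hint = hintfmt("'example' expects a string, but got %1%", showType(*args[0])),
            .nixCode = NixCode { .errPos = pos }
        });
    string s = state.forceStringNoCtx(*args[0], pos);
    mkString(v, s.c_str());
}
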
@ -907,7 +973,10 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
i = v2.attrs->find(state.symbols.create("path"));
if (i == v2.attrs->end())
throw EvalError(format("attribute 'path' missing, at %1%") % pos);
throw EvalError({
.hint = hintfmt("attribute 'path' missing"),
.nixCode = NixCode { .errPos = pos }
});
PathSet context;
string path = state.coerceToString(pos, *i->value, context, false, false);
@ -915,8 +984,10 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError(format("cannot find '%1%', since path '%2%' is not valid, at %3%")
% path % e.path % pos);
throw EvalError({
.hint = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path),
.nixCode = NixCode { .errPos = pos }
});
}
searchPath.emplace_back(prefix, path);
@ -933,12 +1004,15 @@ static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Va
string type = state.forceStringNoCtx(*args[0], pos);
std::optional<HashType> ht = parseHashType(type);
if (!ht)
throw Error(format("unknown hash type '%1%', at %2%") % type % pos);
throw Error({
.hint = hintfmt("unknown hash type '%1%'", type),
.nixCode = NixCode { .errPos = pos }
});
PathSet context; // discarded
Path p = state.coerceToPath(pos, *args[1], context);
mkString(v, hashFile(*ht, state.checkSourcePath(p)).to_string(Base::Base16, false), context);
mkString(v, hashFile(*ht, state.checkSourcePath(p)).to_string(Base16, false), context);
}
/* Read a directory (without . or ..) */
@ -949,8 +1023,10 @@ static void prim_readDir(EvalState & state, const Pos & pos, Value * * args, Val
try {
state.realiseContext(ctx);
} catch (InvalidPathError & e) {
throw EvalError(format("cannot read '%1%', since path '%2%' is not valid, at %3%")
% path % e.path % pos);
throw EvalError({
.hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
.nixCode = NixCode { .errPos = pos }
});
}
DirEntries entries = readDirectory(state.checkSourcePath(path));
@ -1020,9 +1096,13 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
for (auto path : context) {
if (path.at(0) != '/')
throw EvalError(format(
throw EvalError( {
.hint = hintfmt(
"in 'toFile': the file named '%1%' must not contain a reference "
"to a derivation but contains (%2%), at %3%") % name % path % pos);
"to a derivation but contains (%2%)",
name, path),
.nixCode = NixCode { .errPos = pos }
});
refs.insert(state.store->parseStorePath(path));
}
@ -1074,8 +1154,8 @@ static void addPath(EvalState & state, const Pos & pos, const string & name, con
Path dstPath;
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
dstPath = state.store->printStorePath(settings.readOnlyMode
? state.store->computeStorePathForPath(name, path, method, HashType::SHA256, filter).first
: state.store->addToStore(name, path, method, HashType::SHA256, filter, state.repair));
? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first
: state.store->addToStore(name, path, method, htSHA256, filter, state.repair));
if (expectedHash && expectedStorePath != state.store->parseStorePath(dstPath))
throw Error("store path mismatch in (possibly filtered) path added from '%s'", path);
} else
@ -1090,11 +1170,19 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
PathSet context;
Path path = state.coerceToPath(pos, *args[1], context);
if (!context.empty())
throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % pos);
throw EvalError({
.hint = hintfmt("string '%1%' cannot refer to other paths", path),
.nixCode = NixCode { .errPos = pos }
});
state.forceValue(*args[0], pos);
if (args[0]->type != tLambda)
throw TypeError(format("first argument in call to 'filterSource' is not a function but %1%, at %2%") % showType(*args[0]) % pos);
throw TypeError({
.hint = hintfmt(
"first argument in call to 'filterSource' is not a function but %1%",
showType(*args[0])),
.nixCode = NixCode { .errPos = pos }
});
addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, Hash(), v);
}
@ -1114,7 +1202,10 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
PathSet context;
path = state.coerceToPath(*attr.pos, *attr.value, context);
if (!context.empty())
throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % *attr.pos);
throw EvalError({
.hint = hintfmt("string '%1%' cannot refer to other paths", path),
.nixCode = NixCode { .errPos = *attr.pos }
});
} else if (attr.name == state.sName)
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else if (n == "filter") {
@ -1123,12 +1214,18 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
} else if (n == "recursive")
method = FileIngestionMethod { state.forceBool(*attr.value, *attr.pos) };
else if (n == "sha256")
expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), HashType::SHA256);
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
else
throw EvalError(format("unsupported argument '%1%' to 'addPath', at %2%") % attr.name % *attr.pos);
throw EvalError({
.hint = hintfmt("unsupported argument '%1%' to 'addPath'", attr.name),
.nixCode = NixCode { .errPos = *attr.pos }
});
}
if (path.empty())
throw EvalError(format("'path' required, at %1%") % pos);
throw EvalError({
.hint = hintfmt("'path' required"),
.nixCode = NixCode { .errPos = pos }
});
if (name.empty())
name = baseNameOf(path);
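
Several call sites in this merge (outputHash above, the sha256 argument here, narHash over in the fetchers) now go through newHashAllowEmpty() instead of constructing a Hash directly. Its definition is not part of this excerpt; the sketch below is only a guess at the behaviour, inferred from the name and the call sites, namely that an empty user-supplied hash is tolerated and yields an all-zero hash of the requested type:

// Speculative sketch, not the definition from this commit.
Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht)
{
    if (hashStr.empty()) {
        if (!ht)
            throw BadHash("empty hash requires explicit hash type");
        warn("found empty hash, assuming '%s'", Hash(*ht).to_string(SRI, true));
        return Hash(*ht);       // all-zero hash of the given type
    } else
        return Hash(hashStr, ht);
}
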
@ -1186,7 +1283,10 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
// !!! Should we create a symbol here or just do a lookup?
Bindings::iterator i = args[1]->attrs->find(state.symbols.create(attr));
if (i == args[1]->attrs->end())
throw EvalError(format("attribute '%1%' missing, at %2%") % attr % pos);
throw EvalError({
.hint = hintfmt("attribute '%1%' missing", attr),
.nixCode = NixCode { .errPos = pos }
});
// !!! add to stack trace?
if (state.countCalls && i->pos) state.attrSelects[*i->pos]++;
state.forceValue(*i->value, pos);
@ -1266,15 +1366,20 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
Bindings::iterator j = v2.attrs->find(state.sName);
if (j == v2.attrs->end())
throw TypeError(format("'name' attribute missing in a call to 'listToAttrs', at %1%") % pos);
throw TypeError({
.hint = hintfmt("'name' attribute missing in a call to 'listToAttrs'"),
.nixCode = NixCode { .errPos = pos }
});
string name = state.forceStringNoCtx(*j->value, pos);
Symbol sym = state.symbols.create(name);
if (seen.insert(sym).second) {
Bindings::iterator j2 = v2.attrs->find(state.symbols.create(state.sValue));
if (j2 == v2.attrs->end())
throw TypeError(format("'value' attribute missing in a call to 'listToAttrs', at %1%") % pos);
throw TypeError({
.hint = hintfmt("'value' attribute missing in a call to 'listToAttrs'"),
.nixCode = NixCode { .errPos = pos }
});
v.attrs->push_back(Attr(sym, j2->value, j2->pos));
}
}
@ -1347,7 +1452,10 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
{
state.forceValue(*args[0], pos);
if (args[0]->type != tLambda)
throw TypeError(format("'functionArgs' requires a function, at %1%") % pos);
throw TypeError({
.hint = hintfmt("'functionArgs' requires a function"),
.nixCode = NixCode { .errPos = pos }
});
if (!args[0]->lambda.fun->matchAttrs) {
state.mkAttrs(v, 0);
@ -1400,7 +1508,10 @@ static void elemAt(EvalState & state, const Pos & pos, Value & list, int n, Valu
{
state.forceList(list, pos);
if (n < 0 || (unsigned int) n >= list.listSize())
throw Error(format("list index %1% is out of bounds, at %2%") % n % pos);
throw Error({
.hint = hintfmt("list index %1% is out of bounds", n),
.nixCode = NixCode { .errPos = pos }
});
state.forceValue(*list.listElems()[n], pos);
v = *list.listElems()[n];
}
@ -1427,7 +1538,11 @@ static void prim_tail(EvalState & state, const Pos & pos, Value * * args, Value
{
state.forceList(*args[0], pos);
if (args[0]->listSize() == 0)
throw Error(format("'tail' called on an empty list, at %1%") % pos);
throw Error({
.hint = hintfmt("'tail' called on an empty list"),
.nixCode = NixCode { .errPos = pos }
});
state.mkList(v, args[0]->listSize() - 1);
for (unsigned int n = 0; n < v.listSize(); ++n)
v.listElems()[n] = args[0]->listElems()[n + 1];
@ -1568,7 +1683,10 @@ static void prim_genList(EvalState & state, const Pos & pos, Value * * args, Val
auto len = state.forceInt(*args[1], pos);
if (len < 0)
throw EvalError(format("cannot create list of size %1%, at %2%") % len % pos);
throw EvalError({
.hint = hintfmt("cannot create list of size %1%", len),
.nixCode = NixCode { .errPos = pos }
});
state.mkList(v, len);
@ -1726,7 +1844,11 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
state.forceValue(*args[1], pos);
NixFloat f2 = state.forceFloat(*args[1], pos);
if (f2 == 0) throw EvalError(format("division by zero, at %1%") % pos);
if (f2 == 0)
throw EvalError({
.hint = hintfmt("division by zero"),
.nixCode = NixCode { .errPos = pos }
});
if (args[0]->type == tFloat || args[1]->type == tFloat) {
mkFloat(v, state.forceFloat(*args[0], pos) / state.forceFloat(*args[1], pos));
@ -1735,7 +1857,11 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
NixInt i2 = state.forceInt(*args[1], pos);
/* Avoid division overflow as it might raise SIGFPE. */
if (i1 == std::numeric_limits<NixInt>::min() && i2 == -1)
throw EvalError(format("overflow in integer division, at %1%") % pos);
throw EvalError({
.hint = hintfmt("overflow in integer division"),
.nixCode = NixCode { .errPos = pos }
});
mkInt(v, i1 / i2);
}
}
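
A short worked example of why the guard above exists, assuming NixInt is a 64-bit signed integer:

// std::numeric_limits<NixInt>::min() is -9223372036854775808; its quotient by -1
// would be +9223372036854775808, which is not representable, so the division is
// undefined behaviour and on x86 typically traps with SIGFPE. This is the only
// input pair that can overflow, and the check above turns it into an EvalError
// instead of a crash.
NixInt i1 = std::numeric_limits<NixInt>::min(), i2 = -1;
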
@ -1791,7 +1917,11 @@ static void prim_substring(EvalState & state, const Pos & pos, Value * * args, V
PathSet context;
string s = state.coerceToString(pos, *args[2], context);
if (start < 0) throw EvalError(format("negative start position in 'substring', at %1%") % pos);
if (start < 0)
throw EvalError({
.hint = hintfmt("negative start position in 'substring'"),
.nixCode = NixCode { .errPos = pos }
});
mkString(v, (unsigned int) start >= s.size() ? "" : string(s, start, len), context);
}
@ -1811,12 +1941,15 @@ static void prim_hashString(EvalState & state, const Pos & pos, Value * * args,
string type = state.forceStringNoCtx(*args[0], pos);
std::optional<HashType> ht = parseHashType(type);
if (!ht)
throw Error(format("unknown hash type '%1%', at %2%") % type % pos);
throw Error({
.hint = hintfmt("unknown hash type '%1%'", type),
.nixCode = NixCode { .errPos = pos }
});
PathSet context; // discarded
string s = state.forceString(*args[1], context, pos);
mkString(v, hashString(*ht, s).to_string(Base::Base16, false), context);
mkString(v, hashString(*ht, s).to_string(Base16, false), context);
}
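
For reference, a worked example of the call above, using the familiar empty-input digest:

// hashString(htSHA256, "").to_string(Base16, false) evaluates to
//   "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
// (the standard SHA-256 of the empty string); passing true as the second
// argument would prepend the "sha256:" type prefix instead.
auto digest = hashString(htSHA256, "").to_string(Base16, false);
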
@ -1854,9 +1987,15 @@ void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v)
} catch (std::regex_error &e) {
if (e.code() == std::regex_constants::error_space) {
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
throw EvalError("memory limit exceeded by regular expression '%s', at %s", re, pos);
throw EvalError({
.hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
});
} else {
throw EvalError("invalid regular expression '%s', at %s", re, pos);
throw EvalError({
.hint = hintfmt("invalid regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
});
}
}
}
@ -1921,9 +2060,15 @@ static void prim_split(EvalState & state, const Pos & pos, Value * * args, Value
} catch (std::regex_error &e) {
if (e.code() == std::regex_constants::error_space) {
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
throw EvalError("memory limit exceeded by regular expression '%s', at %s", re, pos);
throw EvalError({
.hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
});
} else {
throw EvalError("invalid regular expression '%s', at %s", re, pos);
throw EvalError({
.hint = hintfmt("invalid regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
});
}
}
}
@ -1954,7 +2099,10 @@ static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * ar
state.forceList(*args[0], pos);
state.forceList(*args[1], pos);
if (args[0]->listSize() != args[1]->listSize())
throw EvalError(format("'from' and 'to' arguments to 'replaceStrings' have different lengths, at %1%") % pos);
throw EvalError({
.hint = hintfmt("'from' and 'to' arguments to 'replaceStrings' have different lengths"),
.nixCode = NixCode { .errPos = pos }
});
vector<string> from;
from.reserve(args[0]->listSize());
@ -2057,10 +2205,11 @@ static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args
RegisterPrimOp::PrimOps * RegisterPrimOp::primOps;
RegisterPrimOp::RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun)
RegisterPrimOp::RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun,
std::optional<std::string> requiredFeature)
{
if (!primOps) primOps = new PrimOps;
primOps->emplace_back(name, arity, fun);
primOps->push_back({name, arity, fun, requiredFeature});
}
@ -2252,7 +2401,8 @@ void EvalState::createBaseEnv()
if (RegisterPrimOp::primOps)
for (auto & primOp : *RegisterPrimOp::primOps)
addPrimOp(std::get<0>(primOp), std::get<1>(primOp), std::get<2>(primOp));
if (!primOp.requiredFeature || settings.isExperimentalFeatureEnabled(*primOp.requiredFeature))
addPrimOp(primOp.name, primOp.arity, primOp.primOp);
/* Now that we've added all primops, sort the `builtins' set,
because attribute lookups expect it to be sorted. */

View file

@ -7,12 +7,25 @@ namespace nix {
struct RegisterPrimOp
{
typedef std::vector<std::tuple<std::string, size_t, PrimOpFun>> PrimOps;
struct Info
{
std::string name;
size_t arity;
PrimOpFun primOp;
std::optional<std::string> requiredFeature;
};
typedef std::vector<Info> PrimOps;
static PrimOps * primOps;
/* You can register a constant by passing an arity of 0. fun
will get called during EvalState initialization, so there
may be primops not yet added and builtins is not yet sorted. */
RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun);
RegisterPrimOp(
std::string name,
size_t arity,
PrimOpFun fun,
std::optional<std::string> requiredFeature = {});
};
/* These primops are disabled without enableNativeCode, but plugins
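
With the extended Info record above, a primop can be registered behind an experimental feature and is simply skipped in createBaseEnv() when that feature is disabled. A usage sketch; the primop and feature names are invented for illustration, only the constructor signature comes from the declaration above:

// Hypothetical registration: "__exampleFetch" only becomes a builtin when the
// (equally hypothetical) experimental feature "example-fetch" is enabled.
static RegisterPrimOp r_exampleFetch("__exampleFetch", 1, prim_exampleFetch, "example-fetch");
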

View file

@ -146,7 +146,10 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
auto sAllOutputs = state.symbols.create("allOutputs");
for (auto & i : *args[1]->attrs) {
if (!state.store->isStorePath(i.name))
throw EvalError("Context key '%s' is not a store path, at %s", i.name, i.pos);
throw EvalError({
.hint = hintfmt("Context key '%s' is not a store path", i.name),
.nixCode = NixCode { .errPos = *i.pos }
});
if (!settings.readOnlyMode)
state.store->ensurePath(state.store->parseStorePath(i.name));
state.forceAttrs(*i.value, *i.pos);
@ -160,7 +163,10 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
if (iter != i.value->attrs->end()) {
if (state.forceBool(*iter->value, *iter->pos)) {
if (!isDerivation(i.name)) {
throw EvalError("Tried to add all-outputs context of %s, which is not a derivation, to a string, at %s", i.name, i.pos);
throw EvalError({
.hint = hintfmt("Tried to add all-outputs context of %s, which is not a derivation, to a string", i.name),
.nixCode = NixCode { .errPos = *i.pos }
});
}
context.insert("=" + string(i.name));
}
@ -170,7 +176,10 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
if (iter != i.value->attrs->end()) {
state.forceList(*iter->value, *iter->pos);
if (iter->value->listSize() && !isDerivation(i.name)) {
throw EvalError("Tried to add derivation output context of %s, which is not a derivation, to a string, at %s", i.name, i.pos);
throw EvalError({
.hint = hintfmt("Tried to add derivation output context of %s, which is not a derivation, to a string", i.name),
.nixCode = NixCode { .errPos = *i.pos }
});
}
for (unsigned int n = 0; n < iter->value->listSize(); ++n) {
auto name = state.forceStringNoCtx(*iter->value->listElems()[n], *iter->pos);

View file

@ -29,17 +29,23 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
else if (n == "ref")
ref = state.forceStringNoCtx(*attr.value, *attr.pos);
else if (n == "rev")
rev = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), HashType::SHA1);
rev = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA1);
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else if (n == "submodules")
fetchSubmodules = state.forceBool(*attr.value, *attr.pos);
else
throw EvalError("unsupported argument '%s' to 'fetchGit', at %s", attr.name, *attr.pos);
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to 'fetchGit'", attr.name),
.nixCode = NixCode { .errPos = *attr.pos }
});
}
if (url.empty())
throw EvalError(format("'url' argument required, at %1%") % pos);
throw EvalError({
.hint = hintfmt("'url' argument required"),
.nixCode = NixCode { .errPos = pos }
});
} else
url = state.coerceToString(pos, *args[0], context, false, false);
@ -67,7 +73,7 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
// Backward compatibility: set 'rev' to
// 0000000000000000000000000000000000000000 for a dirty tree.
auto rev2 = input2->getRev().value_or(Hash(HashType::SHA1));
auto rev2 = input2->getRev().value_or(Hash(htSHA1));
mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev2.gitShortRev());
// Backward compatibility: set 'revCount' to 0 for a dirty tree.

View file

@ -31,18 +31,24 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
// be both a revision or a branch/tag name.
auto value = state.forceStringNoCtx(*attr.value, *attr.pos);
if (std::regex_match(value, revRegex))
rev = Hash(value, HashType::SHA1);
rev = Hash(value, htSHA1);
else
ref = value;
}
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else
throw EvalError("unsupported argument '%s' to 'fetchMercurial', at %s", attr.name, *attr.pos);
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to 'fetchMercurial'", attr.name),
.nixCode = NixCode { .errPos = *attr.pos }
});
}
if (url.empty())
throw EvalError(format("'url' argument required, at %1%") % pos);
throw EvalError({
.hint = hintfmt("'url' argument required"),
.nixCode = NixCode { .errPos = pos }
});
} else
url = state.coerceToString(pos, *args[0], context, false, false);
@ -71,7 +77,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
mkString(*state.allocAttr(v, state.symbols.create("branch")), *input2->getRef());
// Backward compatibility: set 'rev' to
// 0000000000000000000000000000000000000000 for a dirty tree.
auto rev2 = input2->getRev().value_or(Hash(HashType::SHA1));
auto rev2 = input2->getRev().value_or(Hash(htSHA1));
mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(rev2.gitRev(), 0, 12));
if (tree.info.revCount)

View file

@ -23,7 +23,7 @@ void emitTreeAttrs(
assert(tree.info.narHash);
mkString(*state.allocAttr(v, state.symbols.create("narHash")),
tree.info.narHash.to_string(Base::SRI));
tree.info.narHash.to_string(SRI, true));
if (input->getRev()) {
mkString(*state.allocAttr(v, state.symbols.create("rev")), input->getRev()->gitRev());
@ -66,7 +66,10 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V
}
if (!attrs.count("type"))
throw Error("attribute 'type' is missing in call to 'fetchTree', at %s", pos);
throw Error({
.hint = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
.nixCode = NixCode { .errPos = pos }
});
input = fetchers::inputFromAttrs(attrs);
} else
@ -103,17 +106,21 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
if (n == "url")
url = state.forceStringNoCtx(*attr.value, *attr.pos);
else if (n == "sha256")
expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), HashType::SHA256);
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else
throw EvalError("unsupported argument '%s' to '%s', at %s",
attr.name, who, *attr.pos);
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to '%s'", attr.name, who),
.nixCode = NixCode { .errPos = *attr.pos }
});
}
if (!url)
throw EvalError("'url' argument required, at %s", pos);
throw EvalError({
.hint = hintfmt("'url' argument required"),
.nixCode = NixCode { .errPos = pos }
});
} else
url = state.forceStringNoCtx(*args[0], pos);
@ -137,10 +144,10 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
if (expectedHash) {
auto hash = unpack
? state.store->queryPathInfo(storePath)->narHash
: hashFile(HashType::SHA256, path);
: hashFile(htSHA256, path);
if (hash != *expectedHash)
throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
*url, expectedHash->to_string(), hash.to_string());
*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
}
if (state.allowedPaths)

View file

@ -81,7 +81,10 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va
try {
visit(v, parser(tomlStream).parse());
} catch (std::runtime_error & e) {
throw EvalError("while parsing a TOML string at %s: %s", pos, e.what());
throw EvalError({
.hint = hintfmt("while parsing a TOML string: %s", e.what()),
.nixCode = NixCode { .errPos = pos }
});
}
}

View file

@ -79,7 +79,7 @@ void printValueAsJSON(EvalState & state, bool strict,
break;
default:
throw TypeError(format("cannot convert %1% to JSON") % showType(v));
throw TypeError("cannot convert %1% to JSON", showType(v));
}
}
@ -93,7 +93,7 @@ void printValueAsJSON(EvalState & state, bool strict,
void ExternalValueBase::printValueAsJSON(EvalState & state, bool strict,
JSONPlaceholder & out, PathSet & context) const
{
throw TypeError(format("cannot convert %1% to JSON") % showType());
throw TypeError("cannot convert %1% to JSON", showType());
}

View file

@ -36,7 +36,7 @@ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs)
if (res) {
if (auto narHash = maybeGetStrAttr(attrs, "narHash"))
// FIXME: require SRI hash.
res->narHash = Hash(*narHash);
res->narHash = newHashAllowEmpty(*narHash, {});
return res;
}
}
@ -47,7 +47,7 @@ Attrs Input::toAttrs() const
{
auto attrs = toAttrsInternal();
if (narHash)
attrs.emplace("narHash", narHash->to_string(Base::SRI));
attrs.emplace("narHash", narHash->to_string(SRI, true));
attrs.emplace("type", type());
return attrs;
}
@ -67,7 +67,7 @@ std::pair<Tree, std::shared_ptr<const Input>> Input::fetchTree(ref<Store> store)
if (narHash && narHash != input->narHash)
throw Error("NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
to_string(), tree.actualPath, narHash->to_string(Base::SRI), input->narHash->to_string(Base::SRI));
to_string(), tree.actualPath, narHash->to_string(SRI, true), input->narHash->to_string(SRI, true));
return {std::move(tree), input};
}
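
The narHash round trip above now uses the two-argument to_string(SRI, true). A sketch of the resulting string; the digest shown is the empty-string SHA-256 hash, included only to illustrate the shape:

// The SRI rendering produces "<type>-<base64 digest>", for a SHA-256 NAR hash
// something like
//   "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="
// (that particular value is the empty-string hash, shown only for its shape);
// this is the string stored under "narHash" and parsed back by
// newHashAllowEmpty() in inputFromAttrs() above.
auto sri = narHash->to_string(SRI, true);
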

View file

@ -95,7 +95,7 @@ struct GitInput : Input
auto input = std::make_shared<GitInput>(*this);
assert(!rev || rev->type == HashType::SHA1);
assert(!rev || rev->type == htSHA1);
std::string cacheType = "git";
if (shallow) cacheType += "-shallow";
@ -195,7 +195,7 @@ struct GitInput : Input
return files.count(file);
};
auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, HashType::SHA256, filter);
auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
auto tree = Tree {
.actualPath = store->printStorePath(storePath),
@ -225,21 +225,21 @@ struct GitInput : Input
if (isLocal) {
if (!input->rev)
input->rev = Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input->ref })), HashType::SHA1);
input->rev = Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input->ref })), htSHA1);
repoDir = actualUrl;
} else {
if (auto res = getCache()->lookup(store, mutableAttrs)) {
auto rev2 = Hash(getStrAttr(res->first, "rev"), HashType::SHA1);
auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
if (!rev || rev == rev2) {
input->rev = rev2;
return makeResult(res->first, std::move(res->second));
}
}
Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(HashType::SHA256, actualUrl).to_string(Base::Base32, false);
Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false);
repoDir = cacheDir;
if (!pathExists(cacheDir)) {
@ -277,7 +277,7 @@ struct GitInput : Input
}
if (doFetch) {
Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("fetching Git repository '%s'", actualUrl));
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", actualUrl));
// FIXME: git stderr messes up our progress indicator, so
// we're using --quiet for now. Should process its stderr.
@ -301,7 +301,7 @@ struct GitInput : Input
}
if (!input->rev)
input->rev = Hash(chomp(readFile(localRefFile)), HashType::SHA1);
input->rev = Hash(chomp(readFile(localRefFile)), htSHA1);
}
bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "rev-parse", "--is-shallow-repository" })) == "true";
@ -350,7 +350,7 @@ struct GitInput : Input
unpackTarfile(*source, tmpDir);
}
auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, HashType::SHA256, filter);
auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input->rev->gitRev() }));
@ -426,7 +426,7 @@ struct GitInputScheme : InputScheme
input->ref = *ref;
}
if (auto rev = maybeGetStrAttr(attrs, "rev"))
input->rev = Hash(*rev, HashType::SHA1);
input->rev = Hash(*rev, htSHA1);
input->shallow = maybeGetBoolAttr(attrs, "shallow").value_or(false);

View file

@ -45,7 +45,7 @@ struct GitHubInput : Input
auto path = owner + "/" + repo;
assert(!(ref && rev));
if (ref) path += "/" + *ref;
if (rev) path += "/" + rev->to_string(Base::Base16, false);
if (rev) path += "/" + rev->to_string(Base16, false);
return ParsedURL {
.scheme = "github",
.path = path,
@ -76,7 +76,7 @@ struct GitHubInput : Input
readFile(
store->toRealPath(
downloadFile(store, url, "source", false).storePath)));
rev = Hash(json["sha"], HashType::SHA1);
rev = Hash(std::string { json["sha"] }, htSHA1);
debug("HEAD revision for '%s' is %s", url, rev->gitRev());
}
@ -106,7 +106,7 @@ struct GitHubInput : Input
// might have stricter rate limits.
auto url = fmt("https://api.github.com/repos/%s/%s/tarball/%s",
owner, repo, rev->to_string(Base::Base16, false));
owner, repo, rev->to_string(Base16, false));
std::string accessToken = settings.githubAccessToken.get();
if (accessToken != "")
@ -140,7 +140,7 @@ struct GitHubInputScheme : InputScheme
if (path.size() == 2) {
} else if (path.size() == 3) {
if (std::regex_match(path[2], revRegex))
input->rev = Hash(path[2], HashType::SHA1);
input->rev = Hash(path[2], htSHA1);
else if (std::regex_match(path[2], refRegex))
input->ref = path[2];
else
@ -152,7 +152,7 @@ struct GitHubInputScheme : InputScheme
if (name == "rev") {
if (input->rev)
throw BadURL("GitHub URL '%s' contains multiple commit hashes", url.url);
input->rev = Hash(value, HashType::SHA1);
input->rev = Hash(value, htSHA1);
}
else if (name == "ref") {
if (!std::regex_match(value, refRegex))
@ -185,7 +185,7 @@ struct GitHubInputScheme : InputScheme
input->repo = getStrAttr(attrs, "repo");
input->ref = maybeGetStrAttr(attrs, "ref");
if (auto rev = maybeGetStrAttr(attrs, "rev"))
input->rev = Hash(*rev, HashType::SHA1);
input->rev = Hash(*rev, htSHA1);
return input;
}
};

View file

@ -8,4 +8,4 @@ libfetchers_SOURCES := $(wildcard $(d)/*.cc)
libfetchers_CXXFLAGS += -I src/libutil -I src/libstore
libfetchers_LIBS = libutil libstore libnixrust
libfetchers_LIBS = libutil libstore

View file

@ -114,7 +114,7 @@ struct MercurialInput : Input
return files.count(file);
};
auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, HashType::SHA256, filter);
auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
return {Tree {
.actualPath = store->printStorePath(storePath),
@ -167,14 +167,14 @@ struct MercurialInput : Input
});
if (auto res = getCache()->lookup(store, mutableAttrs)) {
auto rev2 = Hash(getStrAttr(res->first, "rev"), HashType::SHA1);
auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
if (!rev || rev == rev2) {
input->rev = rev2;
return makeResult(res->first, std::move(res->second));
}
}
Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashType::SHA256, actualUrl).to_string(Base::Base32, false));
Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(Base32, false));
/* If this is a commit hash that we already have, we don't
have to pull again. */
@ -184,7 +184,7 @@ struct MercurialInput : Input
RunOptions("hg", { "log", "-R", cacheDir, "-r", input->rev->gitRev(), "--template", "1" })
.killStderr(true)).second == "1"))
{
Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("fetching Mercurial repository '%s'", actualUrl));
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl));
if (pathExists(cacheDir)) {
try {
@ -210,7 +210,7 @@ struct MercurialInput : Input
runProgram("hg", true, { "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
assert(tokens.size() == 3);
input->rev = Hash(tokens[0], HashType::SHA1);
input->rev = Hash(tokens[0], htSHA1);
auto revCount = std::stoull(tokens[1]);
input->ref = tokens[2];
@ -293,7 +293,7 @@ struct MercurialInputScheme : InputScheme
input->ref = *ref;
}
if (auto rev = maybeGetStrAttr(attrs, "rev"))
input->rev = Hash(*rev, HashType::SHA1);
input->rev = Hash(*rev, htSHA1);
return input;
}
};

View file

@ -101,7 +101,7 @@ struct PathInputScheme : InputScheme
for (auto & [name, value] : url.query)
if (name == "rev")
input->rev = Hash(value, HashType::SHA1);
input->rev = Hash(value, htSHA1);
else if (name == "revCount") {
uint64_t revCount;
if (!string2Int(value, revCount))
@ -129,7 +129,7 @@ struct PathInputScheme : InputScheme
for (auto & [name, value] : attrs)
if (name == "rev")
input->rev = Hash(getStrAttr(attrs, "rev"), HashType::SHA1);
input->rev = Hash(getStrAttr(attrs, "rev"), htSHA1);
else if (name == "revCount")
input->revCount = getIntAttr(attrs, "revCount");
else if (name == "lastModified")

View file

@ -66,9 +66,9 @@ DownloadFileResult downloadFile(
} else {
StringSink sink;
dumpString(*res.data, sink);
auto hash = hashString(HashType::SHA256, *res.data);
auto hash = hashString(htSHA256, *res.data);
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
info.narHash = hashString(HashType::SHA256, *sink.s);
info.narHash = hashString(htSHA256, *sink.s);
info.narSize = sink.s->size();
info.ca = FileSystemHash {
FileIngestionMethod::Flat,
@ -145,7 +145,7 @@ Tree downloadTarball(
throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
auto topDir = tmpDir + "/" + members.begin()->name;
lastModified = lstat(topDir).st_mtime;
unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashType::SHA256, defaultPathFilter, NoRepair);
unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair);
}
Attrs infoAttrs({
@ -199,9 +199,9 @@ struct TarballInput : Input
// NAR hashes are preferred over file hashes since tar/zip files
// don't have a canonical representation.
if (narHash)
url2.query.insert_or_assign("narHash", narHash->to_string(Base::SRI));
url2.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
else if (hash)
url2.query.insert_or_assign("hash", hash->to_string(Base::SRI));
url2.query.insert_or_assign("hash", hash->to_string(SRI, true));
return url2;
}
@ -210,7 +210,7 @@ struct TarballInput : Input
Attrs attrs;
attrs.emplace("url", url.to_string());
if (hash)
attrs.emplace("hash", hash->to_string(Base::SRI));
attrs.emplace("hash", hash->to_string(SRI, true));
return attrs;
}
@ -267,8 +267,7 @@ struct TarballInputScheme : InputScheme
auto input = std::make_unique<TarballInput>(parseURL(getStrAttr(attrs, "url")));
if (auto hash = maybeGetStrAttr(attrs, "hash"))
// FIXME: require SRI hash.
input->hash = Hash(*hash);
input->hash = newHashAllowEmpty(*hash, {});
return input;
}

View file

@ -1,5 +1,6 @@
#include "common-args.hh"
#include "globals.hh"
#include "loggers.hh"
namespace nix {
@ -10,21 +11,19 @@ MixCommonArgs::MixCommonArgs(const string & programName)
.longName = "verbose",
.shortName = 'v',
.description = "increase verbosity level",
.handler = {[]() { verbosity = (Verbosity) ((uint64_t) verbosity + 1); }},
.handler = {[]() { verbosity = (Verbosity) (verbosity + 1); }},
});
addFlag({
.longName = "quiet",
.description = "decrease verbosity level",
.handler = {[]() { verbosity = verbosity > Verbosity::Error
? (Verbosity) ((uint64_t) verbosity - 1)
: Verbosity::Error; }},
.handler = {[]() { verbosity = verbosity > lvlError ? (Verbosity) (verbosity - 1) : lvlError; }},
});
addFlag({
.longName = "debug",
.description = "enable debug output",
.handler = {[]() { verbosity = Verbosity::Debug; }},
.handler = {[]() { verbosity = lvlDebug; }},
});
addFlag({
@ -40,6 +39,14 @@ MixCommonArgs::MixCommonArgs(const string & programName)
}},
});
addFlag({
.longName = "log-format",
.description = "format of log output; \"raw\", \"internal-json\", \"bar\" "
"or \"bar-with-logs\"",
.labels = {"format"},
.handler = {[](std::string format) { setLogFormat(format); }},
});
addFlag({
.longName = "max-jobs",
.shortName = 'j',

52
src/libmain/loggers.cc Normal file
View file

@ -0,0 +1,52 @@
#include "loggers.hh"
#include "progress-bar.hh"
namespace nix {
LogFormat defaultLogFormat = LogFormat::raw;
LogFormat parseLogFormat(const std::string & logFormatStr) {
if (logFormatStr == "raw")
return LogFormat::raw;
else if (logFormatStr == "raw-with-logs")
return LogFormat::rawWithLogs;
else if (logFormatStr == "internal-json")
return LogFormat::internalJson;
else if (logFormatStr == "bar")
return LogFormat::bar;
else if (logFormatStr == "bar-with-logs")
return LogFormat::barWithLogs;
throw Error("option 'log-format' has an invalid value '%s'", logFormatStr);
}
Logger * makeDefaultLogger() {
switch (defaultLogFormat) {
case LogFormat::raw:
return makeSimpleLogger(false);
case LogFormat::rawWithLogs:
return makeSimpleLogger(true);
case LogFormat::internalJson:
return makeJSONLogger(*makeSimpleLogger());
case LogFormat::bar:
return makeProgressBar();
case LogFormat::barWithLogs:
return makeProgressBar(true);
default:
abort();
}
}
void setLogFormat(const std::string & logFormatStr) {
setLogFormat(parseLogFormat(logFormatStr));
}
void setLogFormat(const LogFormat & logFormat) {
defaultLogFormat = logFormat;
createDefaultLogger();
}
void createDefaultLogger() {
logger = makeDefaultLogger();
}
}
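
A minimal sketch of how a frontend drives these helpers; this is not code from the commit, it only exercises the functions declared in loggers.hh:

// Hypothetical start-up: pick a format directly, or map a user-supplied string
// through parseLogFormat() via the string overload of setLogFormat().
#include "loggers.hh"

int main()
{
    nix::setLogFormat(nix::LogFormat::barWithLogs); // installs a progress-bar logger
    // or, from a command-line value:
    // nix::setLogFormat("internal-json");
    return 0;
}
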

20
src/libmain/loggers.hh Normal file
View file

@ -0,0 +1,20 @@
#pragma once
#include "types.hh"
namespace nix {
enum class LogFormat {
raw,
rawWithLogs,
internalJson,
bar,
barWithLogs,
};
void setLogFormat(const std::string & logFormatStr);
void setLogFormat(const LogFormat & logFormat);
void createDefaultLogger();
}

View file

@ -39,7 +39,7 @@ private:
struct ActInfo
{
std::string s, lastLine, phase;
ActivityType type = ActivityType::Unknown;
ActivityType type = actUnknown;
uint64_t done = 0;
uint64_t expected = 0;
uint64_t running = 0;
@ -106,25 +106,36 @@ public:
updateThread.join();
}
void stop()
void stop() override
{
auto state(state_.lock());
if (!state->active) return;
state->active = false;
std::string status = getStatus(*state);
writeToStderr("\r\e[K");
if (status != "")
writeToStderr("[" + status + "]\n");
updateCV.notify_one();
quitCV.notify_one();
}
bool isVerbose() override {
return printBuildLogs;
}
void log(Verbosity lvl, const FormatOrString & fs) override
{
auto state(state_.lock());
log(*state, lvl, fs.s);
}
void logEI(const ErrorInfo &ei) override
{
auto state(state_.lock());
std::stringstream oss;
oss << ei;
log(*state, ei.level, oss.str());
}
void log(State & state, Verbosity lvl, const std::string & s)
{
if (state.active) {
@ -142,7 +153,7 @@ public:
{
auto state(state_.lock());
if (lvl <= verbosity && !s.empty())
if (lvl <= verbosity && !s.empty() && type != actBuildWaiting)
log(*state, lvl, s + "...");
state->activities.emplace_back(ActInfo());
@ -153,8 +164,8 @@ public:
state->its.emplace(act, i);
state->activitiesByType[type].its.emplace(act, i);
if (type == ActivityType::Build) {
auto name = storePathToName(getS(fields, 0));
if (type == actBuild) {
std::string name(storePathToName(getS(fields, 0)));
if (hasSuffix(name, ".drv"))
name = name.substr(0, name.size() - 4);
i->s = fmt("building " ANSI_BOLD "%s" ANSI_NORMAL, name);
@ -168,7 +179,7 @@ public:
i->name = DrvName(name).name;
}
if (type == ActivityType::Substitute) {
if (type == actSubstitute) {
auto name = storePathToName(getS(fields, 0));
auto sub = getS(fields, 1);
i->s = fmt(
@ -178,7 +189,7 @@ public:
name, sub);
}
if (type == ActivityType::PostBuildHook) {
if (type == actPostBuildHook) {
auto name = storePathToName(getS(fields, 0));
if (hasSuffix(name, ".drv"))
name = name.substr(0, name.size() - 4);
@ -186,14 +197,14 @@ public:
i->name = DrvName(name).name;
}
if (type == ActivityType::QueryPathInfo) {
if (type == actQueryPathInfo) {
auto name = storePathToName(getS(fields, 0));
i->s = fmt("querying " ANSI_BOLD "%s" ANSI_NORMAL " on %s", name, getS(fields, 1));
}
if ((type == ActivityType::Download && hasAncestor(*state, ActivityType::CopyPath, parent))
|| (type == ActivityType::Download && hasAncestor(*state, ActivityType::QueryPathInfo, parent))
|| (type == ActivityType::CopyPath && hasAncestor(*state, ActivityType::Substitute, parent)))
if ((type == actFileTransfer && hasAncestor(*state, actCopyPath, parent))
|| (type == actFileTransfer && hasAncestor(*state, actQueryPathInfo, parent))
|| (type == actCopyPath && hasAncestor(*state, actSubstitute, parent)))
i->visible = false;
update(*state);
@ -238,13 +249,13 @@ public:
{
auto state(state_.lock());
if (type == ResultType::FileLinked) {
if (type == resFileLinked) {
state->filesLinked++;
state->bytesLinked += getI(fields, 0);
update(*state);
}
else if (type == ResultType::BuildLogLine || type == ResultType::PostBuildLogLine) {
else if (type == resBuildLogLine || type == resPostBuildLogLine) {
auto lastLine = trim(getS(fields, 0));
if (!lastLine.empty()) {
auto i = state->its.find(act);
@ -252,10 +263,10 @@ public:
ActInfo info = *i->second;
if (printBuildLogs) {
auto suffix = "> ";
if (type == ResultType::PostBuildLogLine) {
if (type == resPostBuildLogLine) {
suffix = " (post)> ";
}
log(*state, Verbosity::Info, ANSI_FAINT + info.name.value_or("unnamed") + suffix + ANSI_NORMAL + lastLine);
log(*state, lvlInfo, ANSI_FAINT + info.name.value_or("unnamed") + suffix + ANSI_NORMAL + lastLine);
} else {
state->activities.erase(i->second);
info.lastLine = lastLine;
@ -266,24 +277,24 @@ public:
}
}
else if (type == ResultType::UntrustedPath) {
else if (type == resUntrustedPath) {
state->untrustedPaths++;
update(*state);
}
else if (type == ResultType::CorruptedPath) {
else if (type == resCorruptedPath) {
state->corruptedPaths++;
update(*state);
}
else if (type == ResultType::SetPhase) {
else if (type == resSetPhase) {
auto i = state->its.find(act);
assert(i != state->its.end());
i->second->phase = getS(fields, 0);
update(*state);
}
else if (type == ResultType::Progress) {
else if (type == resProgress) {
auto i = state->its.find(act);
assert(i != state->its.end());
ActInfo & actInfo = *i->second;
@ -294,7 +305,7 @@ public:
update(*state);
}
else if (type == ResultType::SetExpected) {
else if (type == resSetExpected) {
auto i = state->its.find(act);
assert(i != state->its.end());
ActInfo & actInfo = *i->second;
@ -406,10 +417,10 @@ public:
res += s;
};
showActivity(ActivityType::Builds, "%s built");
showActivity(actBuilds, "%s built");
auto s1 = renderActivity(ActivityType::CopyPaths, "%s copied");
auto s2 = renderActivity(ActivityType::CopyPath, "%s MiB", "%.1f", MiB);
auto s1 = renderActivity(actCopyPaths, "%s copied");
auto s2 = renderActivity(actCopyPath, "%s MiB", "%.1f", MiB);
if (!s1.empty() || !s2.empty()) {
if (!res.empty()) res += ", ";
@ -417,10 +428,10 @@ public:
if (!s2.empty()) { res += " ("; res += s2; res += ')'; }
}
showActivity(ActivityType::Download, "%s MiB DL", "%.1f", MiB);
showActivity(actFileTransfer, "%s MiB DL", "%.1f", MiB);
{
auto s = renderActivity(ActivityType::OptimiseStore, "%s paths optimised");
auto s = renderActivity(actOptimiseStore, "%s paths optimised");
if (s != "") {
s += fmt(", %.1f MiB / %d inodes freed", state.bytesLinked / MiB, state.filesLinked);
if (!res.empty()) res += ", ";
@ -429,7 +440,7 @@ public:
}
// FIXME: don't show "done" paths in green.
showActivity(ActivityType::VerifyPaths, "%s paths verified");
showActivity(actVerifyPaths, "%s paths verified");
if (state.corruptedPaths) {
if (!res.empty()) res += ", ";
@ -457,11 +468,17 @@ public:
}
};
Logger * makeProgressBar(bool printBuildLogs)
{
return new ProgressBar(
printBuildLogs,
isatty(STDERR_FILENO) && getEnv("TERM").value_or("dumb") != "dumb"
);
}
void startProgressBar(bool printBuildLogs)
{
logger = new ProgressBar(
printBuildLogs,
isatty(STDERR_FILENO) && getEnv("TERM").value_or("dumb") != "dumb");
logger = makeProgressBar(printBuildLogs);
}
void stopProgressBar()

View file

@ -4,6 +4,8 @@
namespace nix {
Logger * makeProgressBar(bool printBuildLogs = false);
void startProgressBar(bool printBuildLogs = false);
void stopProgressBar();

View file

@ -2,6 +2,7 @@
#include "shared.hh"
#include "store-api.hh"
#include "util.hh"
#include "loggers.hh"
#include <algorithm>
#include <cctype>
@ -47,7 +48,10 @@ void printMissing(ref<Store> store, const StorePathSet & willBuild,
unsigned long long downloadSize, unsigned long long narSize, Verbosity lvl)
{
if (!willBuild.empty()) {
printMsg(lvl, "these derivations will be built:");
if (willBuild.size() == 1)
printMsg(lvl, fmt("this derivation will be built:"));
else
printMsg(lvl, fmt("these %d derivations will be built:", willBuild.size()));
auto sorted = store->topoSortPaths(willBuild);
reverse(sorted.begin(), sorted.end());
for (auto & i : sorted)
@ -55,9 +59,18 @@ void printMissing(ref<Store> store, const StorePathSet & willBuild,
}
if (!willSubstitute.empty()) {
printMsg(lvl, fmt("these paths will be fetched (%.2f MiB download, %.2f MiB unpacked):",
downloadSize / (1024.0 * 1024.0),
narSize / (1024.0 * 1024.0)));
const float downloadSizeMiB = downloadSize / (1024.f * 1024.f);
const float narSizeMiB = narSize / (1024.f * 1024.f);
if (willSubstitute.size() == 1) {
printMsg(lvl, fmt("this path will be fetched (%.2f MiB download, %.2f MiB unpacked):",
downloadSizeMiB,
narSizeMiB));
} else {
printMsg(lvl, fmt("these %d paths will be fetched (%.2f MiB download, %.2f MiB unpacked):",
willSubstitute.size(),
downloadSizeMiB,
narSizeMiB));
}
for (auto & i : willSubstitute)
printMsg(lvl, fmt(" %s", store->printStorePath(i)));
}
@ -75,7 +88,7 @@ string getArg(const string & opt,
Strings::iterator & i, const Strings::iterator & end)
{
++i;
if (i == end) throw UsageError(format("'%1%' requires an argument") % opt);
if (i == end) throw UsageError("'%1%' requires an argument", opt);
return *i;
}
@ -169,7 +182,7 @@ LegacyArgs::LegacyArgs(const std::string & programName,
.longName = "no-build-output",
.shortName = 'Q',
.description = "do not show build output",
.handler = {&settings.verboseBuild, false},
.handler = {[&]() {setLogFormat(LogFormat::raw); }},
});
addFlag({
@ -234,7 +247,7 @@ bool LegacyArgs::processArgs(const Strings & args, bool finish)
Strings ss(args);
auto pos = ss.begin();
if (!parseArg(pos, ss.end()))
throw UsageError(format("unexpected argument '%1%'") % args.front());
throw UsageError("unexpected argument '%1%'", args.front());
return true;
}
@ -256,7 +269,7 @@ void parseCmdLine(const string & programName, const Strings & args,
void printVersion(const string & programName)
{
std::cout << format("%1% (Nix) %2%") % programName % nixVersion << std::endl;
if (verbosity > Verbosity::Info) {
if (verbosity > lvlInfo) {
Strings cfg;
#if HAVE_BOEHMGC
cfg.push_back("gc");
@ -281,7 +294,7 @@ void showManPage(const string & name)
restoreSignals();
setenv("MANPATH", settings.nixManDir.c_str(), 1);
execlp("man", "man", name.c_str(), nullptr);
throw SysError(format("command 'man %1%' failed") % name.c_str());
throw SysError("command 'man %1%' failed", name.c_str());
}
@ -289,6 +302,8 @@ int handleExceptions(const string & programName, std::function<void()> fun)
{
ReceiveInterrupts receiveInterrupts; // FIXME: need better place for this
ErrorInfo::programName = baseNameOf(programName);
string error = ANSI_RED "error:" ANSI_NORMAL " ";
try {
try {
@ -304,12 +319,13 @@ int handleExceptions(const string & programName, std::function<void()> fun)
} catch (Exit & e) {
return e.status;
} catch (UsageError & e) {
printError(
format(error + "%1%\nTry '%2% --help' for more information.")
% e.what() % programName);
logError(e.info());
printError("Try '%1% --help' for more information.", programName);
return 1;
} catch (BaseError & e) {
printError(format(error + "%1%%2%") % (settings.showTrace ? e.prefix() : "") % e.msg());
if (settings.showTrace && e.prefix() != "")
printError(e.prefix());
logError(e.info());
if (e.prefix() != "" && !settings.showTrace)
printError("(use '--show-trace' to show detailed location information)");
return e.status;
@ -346,7 +362,7 @@ RunPager::RunPager()
execlp("pager", "pager", nullptr);
execlp("less", "less", nullptr);
execlp("more", "more", nullptr);
throw SysError(format("executing '%1%'") % pager);
throw SysError("executing '%1%'", pager);
});
pid.setKillSignal(SIGINT);

View file

@ -43,11 +43,11 @@ struct StorePathWithOutputs;
void printMissing(
ref<Store> store,
const std::vector<StorePathWithOutputs> & paths,
Verbosity lvl = Verbosity::Info);
Verbosity lvl = lvlInfo);
void printMissing(ref<Store> store, const StorePathSet & willBuild,
const StorePathSet & willSubstitute, const StorePathSet & unknown,
unsigned long long downloadSize, unsigned long long narSize, Verbosity lvl = Verbosity::Info);
unsigned long long downloadSize, unsigned long long narSize, Verbosity lvl = lvlInfo);
string getArg(const string & opt,
Strings::iterator & i, const Strings::iterator & end);
@ -56,7 +56,7 @@ template<class N> N getIntArg(const string & opt,
Strings::iterator & i, const Strings::iterator & end, bool allowUnit)
{
++i;
if (i == end) throw UsageError(format("'%1%' requires an argument") % opt);
if (i == end) throw UsageError("'%1%' requires an argument", opt);
string s = *i;
N multiplier = 1;
if (allowUnit && !s.empty()) {
@ -66,13 +66,13 @@ template<class N> N getIntArg(const string & opt,
else if (u == 'M') multiplier = 1ULL << 20;
else if (u == 'G') multiplier = 1ULL << 30;
else if (u == 'T') multiplier = 1ULL << 40;
else throw UsageError(format("invalid unit specifier '%1%'") % u);
else throw UsageError("invalid unit specifier '%1%'", u);
s.resize(s.size() - 1);
}
}
N n;
if (!string2Int(s, n))
throw UsageError(format("'%1%' requires an integer argument") % opt);
throw UsageError("'%1%' requires an integer argument", opt);
return n * multiplier;
}

View file

@ -1,4 +1,4 @@
#include "types.hh"
#include "error.hh"
#include <cstring>
#include <cstddef>

View file

@ -46,8 +46,8 @@ void BinaryCacheStore::init()
auto value = trim(line.substr(colon + 1, std::string::npos));
if (name == "StoreDir") {
if (value != storeDir)
throw Error(format("binary cache '%s' is for Nix stores with prefix '%s', not '%s'")
% getUri() % value % storeDir);
throw Error("binary cache '%s' is for Nix stores with prefix '%s', not '%s'",
getUri(), value, storeDir);
} else if (name == "WantMassQuery") {
wantMassQuery.setDefault(value == "1" ? "true" : "false");
} else if (name == "Priority") {
@ -93,7 +93,7 @@ std::shared_ptr<std::string> BinaryCacheStore::getFile(const std::string & path)
std::string BinaryCacheStore::narInfoFileFor(const StorePath & storePath)
{
return storePathToHash(printStorePath(storePath)) + ".narinfo";
return std::string(storePath.hashPart()) + ".narinfo";
}
void BinaryCacheStore::writeNarInfo(ref<NarInfo> narInfo)
@ -102,7 +102,7 @@ void BinaryCacheStore::writeNarInfo(ref<NarInfo> narInfo)
upsertFile(narInfoFile, narInfo->to_string(*this), "text/x-nix-narinfo");
auto hashPart = storePathToHash(printStorePath(narInfo->path));
std::string hashPart(narInfo->path.hashPart());
{
auto state_(state.lock());
@ -137,7 +137,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
auto narInfo = make_ref<NarInfo>(info);
narInfo->narSize = nar->size();
narInfo->narHash = hashString(HashType::SHA256, *nar);
narInfo->narHash = hashString(htSHA256, *nar);
if (info.narHash && info.narHash != narInfo->narHash)
throw Error("refusing to copy corrupted path '%1%' to binary cache", printStorePath(info.path));
@ -164,7 +164,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
}
}
upsertFile(storePathToHash(printStorePath(info.path)) + ".ls", jsonOut.str(), "application/json");
upsertFile(std::string(info.path.to_string()) + ".ls", jsonOut.str(), "application/json");
}
/* Compress the NAR. */
@ -172,16 +172,16 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
auto now1 = std::chrono::steady_clock::now();
auto narCompressed = compress(compression, *nar, parallelCompression);
auto now2 = std::chrono::steady_clock::now();
narInfo->fileHash = hashString(HashType::SHA256, *narCompressed);
narInfo->fileHash = hashString(htSHA256, *narCompressed);
narInfo->fileSize = narCompressed->size();
auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(now2 - now1).count();
printMsg(Verbosity::Talkative, "copying path '%1%' (%2% bytes, compressed %3$.1f%% in %4% ms) to binary cache",
printMsg(lvlTalkative, "copying path '%1%' (%2% bytes, compressed %3$.1f%% in %4% ms) to binary cache",
printStorePath(narInfo->path), narInfo->narSize,
((1.0 - (double) narCompressed->size() / nar->size()) * 100.0),
duration);
narInfo->url = "nar/" + narInfo->fileHash.to_string(Base::Base32, false) + ".nar"
narInfo->url = "nar/" + narInfo->fileHash.to_string(Base32, false) + ".nar"
+ (compression == "xz" ? ".xz" :
compression == "bzip2" ? ".bz2" :
compression == "br" ? ".br" :
@ -209,7 +209,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
// to a GC'ed file, so overwriting might be useful...
if (fileExists(key)) return;
printMsg(Verbosity::Talkative, "creating debuginfo link from '%s' to '%s'", key, target);
printMsg(lvlTalkative, "creating debuginfo link from '%s' to '%s'", key, target);
upsertFile(key, json.dump(), "application/json");
};
@ -287,7 +287,7 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
try {
getFile(info->url, *decompressor);
} catch (NoSuchBinaryCacheFile & e) {
throw SubstituteGone(e.what());
throw SubstituteGone(e.info());
}
decompressor->finish();
@ -302,7 +302,7 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
{
auto uri = getUri();
auto storePathS = printStorePath(storePath);
auto act = std::make_shared<Activity>(*logger, Verbosity::Talkative, ActivityType::QueryPathInfo,
auto act = std::make_shared<Activity>(*logger, lvlTalkative, actQueryPathInfo,
fmt("querying info about '%s' on '%s'", storePathS, uri), Logger::Fields{storePathS, uri});
PushActivity pact(act->id);
@ -360,7 +360,7 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s
const StorePathSet & references, RepairFlag repair)
{
ValidPathInfo info(computeStorePathForText(name, s, references));
info.references = cloneStorePathSet(references);
info.references = references;
if (repair || !isValidPath(info.path)) {
StringSink sink;
@ -388,21 +388,19 @@ void BinaryCacheStore::addSignatures(const StorePath & storePath, const StringSe
narInfo->sigs.insert(sigs.begin(), sigs.end());
auto narInfoFile = narInfoFileFor(narInfo->path);
writeNarInfo(narInfo);
}
std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const StorePath & path)
{
auto drvPath = path.clone();
auto drvPath = path;
if (!path.isDerivation()) {
try {
auto info = queryPathInfo(path);
// FIXME: add a "Log" field to .narinfo
if (!info->deriver) return nullptr;
drvPath = info->deriver->clone();
drvPath = *info->deriver;
} catch (InvalidPath &) {
return nullptr;
}

File diff suppressed because it is too large


View file

@ -22,7 +22,10 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir,
srcFiles = readDirectory(srcDir);
} catch (SysError & e) {
if (e.errNo == ENOTDIR) {
printError("warning: not including '%s' in the user environment because it's not a directory", srcDir);
logWarning({
.name = "Create links - directory",
.hint = hintfmt("not including '%s' in the user environment because it's not a directory", srcDir)
});
return;
}
throw;
@ -41,7 +44,10 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir,
throw SysError("getting status of '%1%'", srcFile);
} catch (SysError & e) {
if (e.errNo == ENOENT || e.errNo == ENOTDIR) {
printError("warning: skipping dangling symlink '%s'", dstFile);
logWarning({
.name = "Create links - skipping symlink",
.hint = hintfmt("skipping dangling symlink '%s'", dstFile)
});
continue;
}
throw;
@ -72,15 +78,15 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir,
if (!S_ISDIR(lstat(target).st_mode))
throw Error("collision between '%1%' and non-directory '%2%'", srcFile, target);
if (unlink(dstFile.c_str()) == -1)
throw SysError(format("unlinking '%1%'") % dstFile);
throw SysError("unlinking '%1%'", dstFile);
if (mkdir(dstFile.c_str(), 0755) == -1)
throw SysError(format("creating directory '%1%'"));
throw SysError("creating directory '%1%'", dstFile);
createLinks(state, target, dstFile, state.priorities[dstFile]);
createLinks(state, srcFile, dstFile, priority);
continue;
}
} else if (errno != ENOENT)
throw SysError(format("getting status of '%1%'") % dstFile);
throw SysError("getting status of '%1%'", dstFile);
}
else {
@ -99,11 +105,11 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir,
if (prevPriority < priority)
continue;
if (unlink(dstFile.c_str()) == -1)
throw SysError(format("unlinking '%1%'") % dstFile);
throw SysError("unlinking '%1%'", dstFile);
} else if (S_ISDIR(dstSt.st_mode))
throw Error("collision between non-directory '%1%' and directory '%2%'", srcFile, dstFile);
} else if (errno != ENOENT)
throw SysError(format("getting status of '%1%'") % dstFile);
throw SysError("getting status of '%1%'", dstFile);
}
createSymlink(srcFile, dstFile);


@ -18,7 +18,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
auto getAttr = [&](const string & name) {
auto i = drv.env.find(name);
if (i == drv.env.end()) throw Error(format("attribute '%s' missing") % name);
if (i == drv.env.end()) throw Error("attribute '%s' missing", name);
return i->second;
};
@ -54,7 +54,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
auto executable = drv.env.find("executable");
if (executable != drv.env.end() && executable->second == "1") {
if (chmod(storePath.c_str(), 0755) == -1)
throw SysError(format("making '%1%' executable") % storePath);
throw SysError("making '%1%' executable", storePath);
}
};
@ -65,7 +65,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
auto ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
auto h = Hash(getAttr("outputHash"), ht);
fetch(hashedMirror + printHashType(*h.type) + "/" + h.to_string(Base::Base16, false));
fetch(hashedMirror + printHashType(*h.type) + "/" + h.to_string(Base16, false));
return;
} catch (Error & e) {
debug(e.what());


@ -21,7 +21,7 @@ std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
{
return "fixed:"
+ makeFileIngestionPrefix(method)
+ hash.to_string();
+ hash.to_string(Base32, true);
}
// FIXME Put this somewhere?
@ -31,7 +31,7 @@ template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
std::string renderContentAddress(ContentAddress ca) {
return std::visit(overloaded {
[](TextHash th) {
return "text:" + th.hash.to_string();
return "text:" + th.hash.to_string(Base::Base32, true);
},
[](FileSystemHash fsh) {
return makeFixedOutputCA(fsh.method, fsh.hash);
@ -46,7 +46,7 @@ ContentAddress parseContentAddress(std::string_view rawCa) {
if (prefix == "text") {
auto hashTypeAndHash = rawCa.substr(prefixSeparator+1, string::npos);
Hash hash = Hash(string(hashTypeAndHash));
if (*hash.type != HashType::SHA256) {
if (*hash.type != htSHA256) {
throw Error("parseContentAddress: the text hash should have type SHA256");
}
return TextHash { hash };


@ -73,6 +73,18 @@ struct TunnelLogger : public Logger
enqueueMsg(*buf.s);
}
void logEI(const ErrorInfo & ei) override
{
if (ei.level > verbosity) return;
std::stringstream oss;
oss << ei;
StringSink buf;
buf << STDERR_NEXT << oss.str() << "\n"; // (fs.s + "\n");
enqueueMsg(*buf.s);
}
/* startWork() means that we're starting an operation for which we
want to send out stderr to the client. */
void startWork()
@ -114,13 +126,7 @@ struct TunnelLogger : public Logger
}
StringSink buf;
buf << STDERR_START_ACTIVITY
<< act
<< (uint64_t) lvl
<< (uint64_t) type
<< s
<< fields
<< parent;
buf << STDERR_START_ACTIVITY << act << lvl << type << s << fields << parent;
enqueueMsg(*buf.s);
}
@ -136,10 +142,7 @@ struct TunnelLogger : public Logger
{
if (GET_PROTOCOL_MINOR(clientVersion) < 20) return;
StringSink buf;
buf << STDERR_RESULT
<< act
<< (uint64_t) type
<< fields;
buf << STDERR_RESULT << act << type << fields;
enqueueMsg(*buf.s);
}
};
@ -290,7 +293,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
auto path = store->parseStorePath(readString(from));
logger->startWork();
StorePathSet paths; // FIXME
paths.insert(path.clone());
paths.insert(path);
auto res = store->querySubstitutablePaths(paths);
logger->stopWork();
to << (res.count(path) != 0);
@ -311,7 +314,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork();
auto hash = store->queryPathInfo(path)->narHash;
logger->stopWork();
to << hash.to_string(Base::Base16, false);
to << hash.to_string(Base16, false);
break;
}
@ -324,7 +327,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
StorePathSet paths;
if (op == wopQueryReferences)
for (auto & i : store->queryPathInfo(path)->references)
paths.insert(i.clone());
paths.insert(i);
else if (op == wopQueryReferrers)
store->queryReferrers(path, paths);
else if (op == wopQueryValidDerivers)
@ -338,8 +341,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
case wopQueryDerivationOutputNames: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
StringSet names;
names = store->queryDerivationOutputNames(path);
auto names = store->readDerivation(path).outputNames();
logger->stopWork();
to << names;
break;
@ -367,8 +369,10 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
std::string s, baseName;
FileIngestionMethod method;
{
bool fixed, recursive;
bool fixed; uint8_t recursive;
from >> baseName >> fixed /* obsolete */ >> recursive >> s;
if (recursive > (uint8_t) FileIngestionMethod::Recursive)
throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive);
method = FileIngestionMethod { recursive };
/* Compatibility hack. */
if (!fixed) {
@ -559,7 +563,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
clientSettings.maxBuildJobs = readInt(from);
clientSettings.maxSilentTime = readInt(from);
readInt(from); // obsolete useBuildHook
clientSettings.verboseBuild = Verbosity::Error == (Verbosity) readInt(from);
clientSettings.verboseBuild = lvlError == (Verbosity) readInt(from);
readInt(from); // obsolete logType
readInt(from); // obsolete printBuildTrace
clientSettings.buildCores = readInt(from);
@ -589,9 +593,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
auto path = store->parseStorePath(readString(from));
logger->startWork();
SubstitutablePathInfos infos;
StorePathSet paths;
paths.insert(path.clone()); // FIXME
store->querySubstitutablePathInfos(paths, infos);
store->querySubstitutablePathInfos({path}, infos);
logger->stopWork();
auto i = infos.find(path);
if (i == infos.end())
@ -644,7 +646,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
to << 1;
to << (info->deriver ? store->printStorePath(*info->deriver) : "")
<< info->narHash.to_string(Base::Base16, false);
<< info->narHash.to_string(Base16, false);
writeStorePaths(*store, to, info->references);
to << info->registrationTime << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
@ -704,7 +706,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
auto deriver = readString(from);
if (deriver != "")
info.deriver = store->parseStorePath(deriver);
info.narHash = Hash(readString(from), HashType::SHA256);
info.narHash = Hash(readString(from), htSHA256);
info.references = readStorePaths<StorePathSet>(*store, from);
from >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(from);
@ -753,7 +755,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
}
default:
throw Error(format("invalid operation %1%") % op);
throw Error("invalid operation %1%", op);
}
}
@ -788,7 +790,7 @@ void processConnection(
Finally finally([&]() {
_isInterrupted = false;
prevLogger->log(Verbosity::Debug, fmt("%d operations", opCount));
prevLogger->log(lvlDebug, fmt("%d operations", opCount));
});
if (GET_PROTOCOL_MINOR(clientVersion) >= 14 && readInt(from)) {


@ -8,28 +8,6 @@
namespace nix {
BasicDerivation::BasicDerivation(const BasicDerivation & other)
: platform(other.platform)
, builder(other.builder)
, args(other.args)
, env(other.env)
{
for (auto & i : other.outputs)
outputs.insert_or_assign(i.first,
DerivationOutput(i.second.path.clone(), std::optional<FileSystemHash>(i.second.hash)));
for (auto & i : other.inputSrcs)
inputSrcs.insert(i.clone());
}
Derivation::Derivation(const Derivation & other)
: BasicDerivation(other)
{
for (auto & i : other.inputDrvs)
inputDrvs.insert_or_assign(i.first.clone(), i.second);
}
const StorePath & BasicDerivation::findOutput(const string & id) const
{
auto i = outputs.find(id);
@ -48,9 +26,9 @@ bool BasicDerivation::isBuiltin() const
StorePath writeDerivation(ref<Store> store,
const Derivation & drv, std::string_view name, RepairFlag repair)
{
auto references = cloneStorePathSet(drv.inputSrcs);
auto references = drv.inputSrcs;
for (auto & i : drv.inputDrvs)
references.insert(i.first.clone());
references.insert(i.first);
/* Note that the outputs of a derivation are *not* references
(that can be missing (of course) and should not necessarily be
held during a garbage collection). */
@ -68,7 +46,7 @@ static void expect(std::istream & str, const string & s)
char s2[s.size()];
str.read(s2, s.size());
if (string(s2, s.size()) != s)
throw FormatError(format("expected string '%1%'") % s);
throw FormatError("expected string '%1%'", s);
}
@ -95,7 +73,7 @@ static Path parsePath(std::istream & str)
{
string s = parseString(str);
if (s.size() == 0 || s[0] != '/')
throw FormatError(format("bad path '%1%' in derivation") % s);
throw FormatError("bad path '%1%' in derivation", s);
return s;
}
@ -130,9 +108,9 @@ static DerivationOutput parseDerivationOutput(const Store & store, istringstream
expect(str, ","); const auto hash = parseString(str);
expect(str, ")");
auto method = FileIngestionMethod::Flat;
std::optional<FileSystemHash> fsh;
if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat;
if (string(hashAlgo, 0, 2) == "r:") {
method = FileIngestionMethod::Recursive;
hashAlgo = string(hashAlgo, 2);
@ -144,7 +122,10 @@ static DerivationOutput parseDerivationOutput(const Store & store, istringstream
};
}
return DerivationOutput(std::move(path), std::move(fsh));
return DerivationOutput {
.path = std::move(path),
.hash = std::move(fsh),
};
}
@ -157,7 +138,8 @@ static Derivation parseDerivation(const Store & store, const string & s)
/* Parse the list of outputs. */
while (!endOfList(str)) {
expect(str, "("); std::string id = parseString(str);
drv.outputs.emplace(id, parseDerivationOutput(store, str));
auto output = parseDerivationOutput(store, str);
drv.outputs.emplace(std::move(id), std::move(output));
}
/* Parse the list of input derivations. */
@ -198,7 +180,7 @@ Derivation readDerivation(const Store & store, const Path & drvPath)
try {
return parseDerivation(store, readFile(drvPath));
} catch (FormatError & e) {
throw Error(format("error parsing derivation '%1%': %2%") % drvPath % e.msg());
throw Error("error parsing derivation '%1%': %2%", drvPath, e.msg());
}
}
@ -206,6 +188,12 @@ Derivation readDerivation(const Store & store, const Path & drvPath)
Derivation Store::derivationFromPath(const StorePath & drvPath)
{
ensurePath(drvPath);
return readDerivation(drvPath);
}
Derivation Store::readDerivation(const StorePath & drvPath)
{
auto accessor = getFSAccessor();
try {
return parseDerivation(*this, accessor->readFile(printStorePath(drvPath)));
@ -367,9 +355,9 @@ Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutput
/* Return a fixed hash for fixed-output derivations. */
if (drv.isFixedOutput()) {
DerivationOutputs::const_iterator i = drv.outputs.begin();
return hashString(HashType::SHA256, "fixed:out:"
return hashString(htSHA256, "fixed:out:"
+ i->second.hash->printMethodAlgo() + ":"
+ i->second.hash->hash.to_string(Base::Base16, false) + ":"
+ i->second.hash->hash.to_string(Base16, false) + ":"
+ store.printStorePath(i->second.path));
}
@ -380,13 +368,13 @@ Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutput
auto h = drvHashes.find(i.first);
if (h == drvHashes.end()) {
assert(store.isValidPath(i.first));
h = drvHashes.insert_or_assign(i.first.clone(), hashDerivationModulo(store,
readDerivation(store, store.toRealPath(i.first)), false)).first;
h = drvHashes.insert_or_assign(i.first, hashDerivationModulo(store,
store.readDerivation(i.first), false)).first;
}
inputs2.insert_or_assign(h->second.to_string(Base::Base16, false), i.second);
inputs2.insert_or_assign(h->second.to_string(Base16, false), i.second);
}
return hashString(HashType::SHA256, drv.unparse(store, maskOutputs, &inputs2));
return hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2));
}
@ -408,7 +396,7 @@ StorePathSet BasicDerivation::outputPaths() const
{
StorePathSet paths;
for (auto & i : outputs)
paths.insert(i.second.path.clone());
paths.insert(i.second.path);
return paths;
}
@ -418,23 +406,35 @@ static DerivationOutput readDerivationOutput(Source & in, const Store & store)
auto hashAlgo = readString(in);
const auto hash = readString(in);
auto method = FileIngestionMethod::Flat;
std::optional<FileSystemHash> fsh;
if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat;
if (string(hashAlgo, 0, 2) == "r:") {
method = FileIngestionMethod::Recursive;
hashAlgo = string(hashAlgo, 2);
}
HashType hashType = parseHashType(hashAlgo);
const HashType hashType = parseHashType(hashAlgo);
fsh = FileSystemHash {
std::move(method),
Hash(hash, hashType),
};
}
return DerivationOutput(std::move(path), std::move(fsh));
return DerivationOutput {
.path = std::move(path),
.hash = std::move(fsh),
};
}
StringSet BasicDerivation::outputNames() const
{
StringSet names;
for (auto & i : outputs)
names.insert(i.first);
return names;
}
Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv)
{
drv.outputs.clear();
@ -442,7 +442,7 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv)
for (size_t n = 0; n < nr; n++) {
auto name = readString(in);
auto output = readDerivationOutput(in, store);
drv.outputs.emplace(name, output);
drv.outputs.emplace(std::move(name), std::move(output));
}
drv.inputSrcs = readStorePaths<StorePathSet>(store, in);
@ -479,7 +479,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
std::string hashPlaceholder(const std::string & outputName)
{
// FIXME: memoize?
return "/" + hashString(HashType::SHA256, "nix-output:" + outputName).to_string(Base::Base32, false);
return "/" + hashString(htSHA256, "nix-output:" + outputName).to_string(Base32, false);
}


@ -17,13 +17,6 @@ struct DerivationOutput
{
StorePath path;
std::optional<FileSystemHash> hash; /* hash used for expected hash computation */
DerivationOutput(StorePath && path, std::optional<FileSystemHash> && hash)
: path(std::move(path))
, hash(std::move(hash))
{ }
DerivationOutput(const DerivationOutput &) = default;
DerivationOutput(DerivationOutput &&) = default;
DerivationOutput & operator = (const DerivationOutput &) = default;
};
typedef std::map<string, DerivationOutput> DerivationOutputs;
@ -44,7 +37,6 @@ struct BasicDerivation
StringPairs env;
BasicDerivation() { }
explicit BasicDerivation(const BasicDerivation & other);
virtual ~BasicDerivation() { };
/* Return the path corresponding to the output identifier `id' in
@ -59,6 +51,8 @@ struct BasicDerivation
/* Return the output paths of a derivation. */
StorePathSet outputPaths() const;
/* Return the output names of a derivation. */
StringSet outputNames() const;
};
struct Derivation : BasicDerivation
@ -70,8 +64,6 @@ struct Derivation : BasicDerivation
std::map<std::string, StringSet> * actualInputs = nullptr) const;
Derivation() { }
Derivation(Derivation && other) = default;
explicit Derivation(const Derivation & other);
};


@ -11,7 +11,7 @@ struct HashAndWriteSink : Sink
{
Sink & writeSink;
HashSink hashSink;
HashAndWriteSink(Sink & writeSink) : writeSink(writeSink), hashSink(HashType::SHA256)
HashAndWriteSink(Sink & writeSink) : writeSink(writeSink), hashSink(htSHA256)
{
}
virtual void operator () (const unsigned char * data, size_t len)
@ -34,7 +34,7 @@ void Store::exportPaths(const StorePathSet & paths, Sink & sink)
//logger->incExpected(doneLabel, sorted.size());
for (auto & path : sorted) {
//Activity act(*logger, Verbosity::Info, format("exporting path '%s'") % path);
//Activity act(*logger, lvlInfo, format("exporting path '%s'") % path);
sink << 1;
exportPath(path, sink);
//logger->incProgress(doneLabel);
@ -57,7 +57,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
Hash hash = hashAndWriteSink.currentHash();
if (hash != info->narHash && info->narHash != Hash(*info->narHash.type))
throw Error("hash of path '%s' has changed from '%s' to '%s'!",
printStorePath(path), info->narHash.to_string(), hash.to_string());
printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
hashAndWriteSink
<< exportMagic
@ -86,7 +86,7 @@ StorePaths Store::importPaths(Source & source, std::shared_ptr<FSAccessor> acces
ValidPathInfo info(parseStorePath(readString(source)));
//Activity act(*logger, Verbosity::Info, format("importing path '%s'") % info.path);
//Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path);
info.references = readStorePaths<StorePathSet>(*this, source);
@ -94,7 +94,7 @@ StorePaths Store::importPaths(Source & source, std::shared_ptr<FSAccessor> acces
if (deriver != "")
info.deriver = parseStorePath(deriver);
info.narHash = hashString(HashType::SHA256, *tee.source.data);
info.narHash = hashString(htSHA256, *tee.source.data);
info.narSize = tee.source.data->size();
// Ignore optional legacy signature.
@ -105,7 +105,7 @@ StorePaths Store::importPaths(Source & source, std::shared_ptr<FSAccessor> acces
auto source = StringSource { *tee.source.data };
addToStore(info, source, NoRepair, checkSigs, accessor);
res.push_back(info.path.clone());
res.push_back(info.path);
}
return res;


@ -72,23 +72,34 @@ struct curlFileTransfer : public FileTransfer
curl_off_t writtenToSink = 0;
inline static const std::set<long> successfulStatuses {200, 201, 204, 206, 304, 0 /* other protocol */};
/* Get the HTTP status code, or 0 for other protocols. */
long getHTTPStatus()
{
long httpStatus = 0;
long protocol = 0;
curl_easy_getinfo(req, CURLINFO_PROTOCOL, &protocol);
if (protocol == CURLPROTO_HTTP || protocol == CURLPROTO_HTTPS)
curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
return httpStatus;
}
TransferItem(curlFileTransfer & fileTransfer,
const FileTransferRequest & request,
Callback<FileTransferResult> && callback)
: fileTransfer(fileTransfer)
, request(request)
, act(*logger, Verbosity::Talkative, ActivityType::Download,
, act(*logger, lvlTalkative, actFileTransfer,
fmt(request.data ? "uploading '%s'" : "downloading '%s'", request.uri),
{request.uri}, request.parentAct)
, callback(std::move(callback))
, finalSink([this](const unsigned char * data, size_t len) {
if (this->request.dataCallback) {
long httpStatus = 0;
curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
auto httpStatus = getHTTPStatus();
/* Only write data to the sink if this is a
successful response. */
if (httpStatus == 0 || httpStatus == 200 || httpStatus == 201 || httpStatus == 206) {
if (successfulStatuses.count(httpStatus)) {
writtenToSink += len;
this->request.dataCallback((char *) data, len);
}
@ -112,7 +123,7 @@ struct curlFileTransfer : public FileTransfer
if (requestHeaders) curl_slist_free_all(requestHeaders);
try {
if (!done)
fail(FileTransferError(Interrupted, format("download of '%s' was interrupted") % request.uri));
fail(FileTransferError(Interrupted, "download of '%s' was interrupted", request.uri));
} catch (...) {
ignoreException();
}
@ -163,7 +174,7 @@ struct curlFileTransfer : public FileTransfer
{
size_t realSize = size * nmemb;
std::string line((char *) contents, realSize);
printMsg(Verbosity::Vomit, format("got header for '%s': %s") % request.uri % trim(line));
printMsg(lvlVomit, format("got header for '%s': %s") % request.uri % trim(line));
if (line.compare(0, 5, "HTTP/") == 0) { // new response starts
result.etag = "";
auto ss = tokenizeString<vector<string>>(line, " ");
@ -246,7 +257,7 @@ struct curlFileTransfer : public FileTransfer
curl_easy_reset(req);
if (verbosity >= Verbosity::Vomit) {
if (verbosity >= lvlVomit) {
curl_easy_setopt(req, CURLOPT_VERBOSE, 1);
curl_easy_setopt(req, CURLOPT_DEBUGFUNCTION, TransferItem::debugCallback);
}
@ -316,8 +327,7 @@ struct curlFileTransfer : public FileTransfer
void finish(CURLcode code)
{
long httpStatus = 0;
curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
auto httpStatus = getHTTPStatus();
char * effectiveUriCStr;
curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUriCStr);
@ -343,8 +353,7 @@ struct curlFileTransfer : public FileTransfer
if (writeException)
failEx(writeException);
else if (code == CURLE_OK &&
(httpStatus == 200 || httpStatus == 201 || httpStatus == 204 || httpStatus == 206 || httpStatus == 304 || httpStatus == 226 /* FTP */ || httpStatus == 0 /* other protocol */))
else if (code == CURLE_OK && successfulStatuses.count(httpStatus))
{
result.cached = httpStatus == 304;
act.progress(result.bodySize, result.bodySize);
@ -517,7 +526,7 @@ struct curlFileTransfer : public FileTransfer
int running;
CURLMcode mc = curl_multi_perform(curlm, &running);
if (mc != CURLM_OK)
throw nix::Error(format("unexpected error from curl_multi_perform(): %s") % curl_multi_strerror(mc));
throw nix::Error("unexpected error from curl_multi_perform(): %s", curl_multi_strerror(mc));
/* Set the promises of any finished requests. */
CURLMsg * msg;
@ -547,7 +556,7 @@ struct curlFileTransfer : public FileTransfer
vomit("download thread waiting for %d ms", sleepTimeMs);
mc = curl_multi_wait(curlm, extraFDs, 1, sleepTimeMs, &numfds);
if (mc != CURLM_OK)
throw nix::Error(format("unexpected error from curl_multi_wait(): %s") % curl_multi_strerror(mc));
throw nix::Error("unexpected error from curl_multi_wait(): %s", curl_multi_strerror(mc));
nextWakeup = std::chrono::steady_clock::time_point();
@ -599,7 +608,11 @@ struct curlFileTransfer : public FileTransfer
workerThreadMain();
} catch (nix::Interrupted & e) {
} catch (std::exception & e) {
printError("unexpected error in download thread: %s", e.what());
logError({
.name = "File transfer",
.hint = hintfmt("unexpected error in download thread: %s",
e.what())
});
}
{


@ -103,8 +103,9 @@ class FileTransferError : public Error
{
public:
FileTransfer::Error error;
FileTransferError(FileTransfer::Error error, const FormatOrString & fs)
: Error(fs), error(error)
template<typename... Args>
FileTransferError(FileTransfer::Error error, const Args & ... args)
: Error(args...), error(error)
{ }
};


@ -38,10 +38,10 @@ AutoCloseFD LocalStore::openGCLock(LockType lockType)
AutoCloseFD fdGCLock = open(fnGCLock.c_str(), O_RDWR | O_CREAT | O_CLOEXEC, 0600);
if (!fdGCLock)
throw SysError(format("opening global GC lock '%1%'") % fnGCLock);
throw SysError("opening global GC lock '%1%'", fnGCLock);
if (!lockFile(fdGCLock.get(), lockType, false)) {
printError(format("waiting for the big garbage collector lock..."));
printInfo("waiting for the big garbage collector lock...");
lockFile(fdGCLock.get(), lockType, true);
}
@ -65,8 +65,8 @@ static void makeSymlink(const Path & link, const Path & target)
/* Atomically replace the old one. */
if (rename(tempLink.c_str(), link.c_str()) == -1)
throw SysError(format("cannot rename '%1%' to '%2%'")
% tempLink % link);
throw SysError("cannot rename '%1%' to '%2%'",
tempLink , link);
}
@ -78,7 +78,7 @@ void LocalStore::syncWithGC()
void LocalStore::addIndirectRoot(const Path & path)
{
string hash = hashString(HashType::SHA1, path).to_string(Base::Base32, false);
string hash = hashString(htSHA1, path).to_string(Base32, false);
Path realRoot = canonPath((format("%1%/%2%/auto/%3%")
% stateDir % gcRootsDir % hash).str());
makeSymlink(realRoot, path);
@ -91,15 +91,15 @@ Path LocalFSStore::addPermRoot(const StorePath & storePath,
Path gcRoot(canonPath(_gcRoot));
if (isInStore(gcRoot))
throw Error(format(
throw Error(
"creating a garbage collector root (%1%) in the Nix store is forbidden "
"(are you running nix-build inside the store?)") % gcRoot);
"(are you running nix-build inside the store?)", gcRoot);
if (indirect) {
/* Don't clobber the link if it already exists and doesn't
point to the Nix store. */
if (pathExists(gcRoot) && (!isLink(gcRoot) || !isInStore(readLink(gcRoot))))
throw Error(format("cannot create symlink '%1%'; already exists") % gcRoot);
throw Error("cannot create symlink '%1%'; already exists", gcRoot);
makeSymlink(gcRoot, printStorePath(storePath));
addIndirectRoot(gcRoot);
}
@ -109,10 +109,10 @@ Path LocalFSStore::addPermRoot(const StorePath & storePath,
Path rootsDir = canonPath((format("%1%/%2%") % stateDir % gcRootsDir).str());
if (string(gcRoot, 0, rootsDir.size() + 1) != rootsDir + "/")
throw Error(format(
throw Error(
"path '%1%' is not a valid garbage collector root; "
"it's not in the directory '%2%'")
% gcRoot % rootsDir);
"it's not in the directory '%2%'",
gcRoot, rootsDir);
}
if (baseNameOf(gcRoot) == std::string(storePath.to_string()))
@ -128,11 +128,13 @@ Path LocalFSStore::addPermRoot(const StorePath & storePath,
gcroots directory. */
if (settings.checkRootReachability) {
auto roots = findRoots(false);
if (roots[storePath.clone()].count(gcRoot) == 0)
printError(
"warning: '%1%' is not in a directory where the garbage collector looks for roots; "
if (roots[storePath].count(gcRoot) == 0)
logWarning({
.name = "GC root",
.hint = hintfmt("warning: '%1%' is not in a directory where the garbage collector looks for roots; "
"therefore, '%2%' might be removed by the garbage collector",
gcRoot, printStorePath(storePath));
gcRoot, printStorePath(storePath))
});
}
/* Grab the global GC root, causing us to block while a GC is in
@ -170,7 +172,7 @@ void LocalStore::addTempRoot(const StorePath & path)
way. */
struct stat st;
if (fstat(state->fdTempRoots.get(), &st) == -1)
throw SysError(format("statting '%1%'") % fnTempRoots);
throw SysError("statting '%1%'", fnTempRoots);
if (st.st_size == 0) break;
/* The garbage collector deleted this file before we could
@ -216,7 +218,7 @@ void LocalStore::findTempRoots(FDs & fds, Roots & tempRoots, bool censor)
if (!*fd) {
/* It's okay if the file has disappeared. */
if (errno == ENOENT) continue;
throw SysError(format("opening temporary roots file '%1%'") % path);
throw SysError("opening temporary roots file '%1%'", path);
}
/* This should work, but doesn't, for some reason. */
@ -227,7 +229,7 @@ void LocalStore::findTempRoots(FDs & fds, Roots & tempRoots, bool censor)
only succeed if the owning process has died. In that case
we don't care about its temporary roots. */
if (lockFile(fd->get(), ltWrite, false)) {
printError(format("removing stale temporary roots file '%1%'") % path);
printInfo("removing stale temporary roots file '%1%'", path);
unlink(path.c_str());
writeFull(fd->get(), "d");
continue;
@ -403,7 +405,7 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
if (!fdDir) {
if (errno == ENOENT || errno == EACCES)
continue;
throw SysError(format("opening %1%") % fdStr);
throw SysError("opening %1%", fdStr);
}
struct dirent * fd_ent;
while (errno = 0, fd_ent = readdir(fdDir.get())) {
@ -413,7 +415,7 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
if (errno) {
if (errno == ESRCH)
continue;
throw SysError(format("iterating /proc/%1%/fd") % ent->d_name);
throw SysError("iterating /proc/%1%/fd", ent->d_name);
}
fdDir.reset();
@ -476,9 +478,9 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
if (!isValidPath(path)) continue;
debug("got additional root '%1%'", pathS);
if (censor)
roots[path.clone()].insert(censored);
roots[path].insert(censored);
else
roots[path.clone()].insert(links.begin(), links.end());
roots[path].insert(links.begin(), links.end());
}
}
@ -541,7 +543,7 @@ void LocalStore::deletePathRecursive(GCState & state, const Path & path)
struct stat st;
if (lstat(realPath.c_str(), &st)) {
if (errno == ENOENT) return;
throw SysError(format("getting status of %1%") % realPath);
throw SysError("getting status of %1%", realPath);
}
printInfo(format("deleting '%1%'") % path);
@ -559,10 +561,10 @@ void LocalStore::deletePathRecursive(GCState & state, const Path & path)
// size.
try {
if (chmod(realPath.c_str(), st.st_mode | S_IWUSR) == -1)
throw SysError(format("making '%1%' writable") % realPath);
throw SysError("making '%1%' writable", realPath);
Path tmp = trashDir + "/" + std::string(baseNameOf(path));
if (rename(realPath.c_str(), tmp.c_str()))
throw SysError(format("unable to rename '%1%' to '%2%'") % realPath % tmp);
throw SysError("unable to rename '%1%' to '%2%'", realPath, tmp);
state.bytesInvalidated += size;
} catch (SysError & e) {
if (e.errNo == ENOSPC) {
@ -590,11 +592,11 @@ bool LocalStore::canReachRoot(GCState & state, StorePathSet & visited, const Sto
if (state.roots.count(path)) {
debug("cannot delete '%1%' because it's a root", printStorePath(path));
state.alive.insert(path.clone());
state.alive.insert(path);
return true;
}
visited.insert(path.clone());
visited.insert(path);
if (!isValidPath(path)) return false;
@ -608,7 +610,7 @@ bool LocalStore::canReachRoot(GCState & state, StorePathSet & visited, const Sto
if (state.gcKeepDerivations && path.isDerivation()) {
for (auto & i : queryDerivationOutputs(path))
if (isValidPath(i) && queryPathInfo(i)->deriver == path)
incoming.insert(i.clone());
incoming.insert(i);
}
/* If keep-outputs is set, then don't delete this path if there
@ -616,13 +618,13 @@ bool LocalStore::canReachRoot(GCState & state, StorePathSet & visited, const Sto
if (state.gcKeepOutputs) {
auto derivers = queryValidDerivers(path);
for (auto & i : derivers)
incoming.insert(i.clone());
incoming.insert(i);
}
for (auto & i : incoming)
if (i != path)
if (canReachRoot(state, visited, i)) {
state.alive.insert(path.clone());
state.alive.insert(path);
return true;
}
@ -637,7 +639,7 @@ void LocalStore::tryToDelete(GCState & state, const Path & path)
auto realPath = realStoreDir + "/" + std::string(baseNameOf(path));
if (realPath == linksDir || realPath == trashDir) return;
//Activity act(*logger, Verbosity::Debug, format("considering whether to delete '%1%'") % path);
//Activity act(*logger, lvlDebug, format("considering whether to delete '%1%'") % path);
auto storePath = maybeParseStorePath(path);
@ -666,7 +668,7 @@ void LocalStore::tryToDelete(GCState & state, const Path & path)
nix-store --delete doesn't have the unexpected effect of
recursing into derivations and outputs. */
for (auto & i : visited)
state.dead.insert(i.clone());
state.dead.insert(i);
if (state.shouldDelete)
deletePathRecursive(state, path);
}
@ -681,7 +683,7 @@ void LocalStore::tryToDelete(GCState & state, const Path & path)
void LocalStore::removeUnusedLinks(const GCState & state)
{
AutoCloseDir dir(opendir(linksDir.c_str()));
if (!dir) throw SysError(format("opening directory '%1%'") % linksDir);
if (!dir) throw SysError("opening directory '%1%'", linksDir);
long long actualSize = 0, unsharedSize = 0;
@ -694,7 +696,7 @@ void LocalStore::removeUnusedLinks(const GCState & state)
struct stat st;
if (lstat(path.c_str(), &st) == -1)
throw SysError(format("statting '%1%'") % path);
throw SysError("statting '%1%'", path);
if (st.st_nlink != 1) {
actualSize += st.st_size;
@ -702,17 +704,17 @@ void LocalStore::removeUnusedLinks(const GCState & state)
continue;
}
printMsg(Verbosity::Talkative, format("deleting unused link '%1%'") % path);
printMsg(lvlTalkative, format("deleting unused link '%1%'") % path);
if (unlink(path.c_str()) == -1)
throw SysError(format("deleting '%1%'") % path);
throw SysError("deleting '%1%'", path);
state.results.bytesFreed += st.st_size;
}
struct stat st;
if (stat(linksDir.c_str(), &st) == -1)
throw SysError(format("statting '%1%'") % linksDir);
throw SysError("statting '%1%'", linksDir);
long long overhead = st.st_blocks * 512ULL;
printInfo(format("note: currently hard linking saves %.2f MiB")
@ -747,12 +749,12 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
/* Find the roots. Since we've grabbed the GC lock, the set of
permanent roots cannot increase now. */
printError("finding garbage collector roots...");
printInfo("finding garbage collector roots...");
Roots rootMap;
if (!options.ignoreLiveness)
findRootsNoTemp(rootMap, true);
for (auto & i : rootMap) state.roots.insert(i.first.clone());
for (auto & i : rootMap) state.roots.insert(i.first);
/* Read the temporary roots. This acquires read locks on all
per-process temporary root files. So after this point no paths
@ -761,8 +763,8 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
Roots tempRoots;
findTempRoots(fds, tempRoots, true);
for (auto & root : tempRoots) {
state.tempRoots.insert(root.first.clone());
state.roots.insert(root.first.clone());
state.tempRoots.insert(root.first);
state.roots.insert(root.first);
}
/* After this point the set of roots or temporary roots cannot
@ -799,14 +801,14 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
} else if (options.maxFreed > 0) {
if (state.shouldDelete)
printError("deleting garbage...");
printInfo("deleting garbage...");
else
printError("determining live/dead paths...");
printInfo("determining live/dead paths...");
try {
AutoCloseDir dir(opendir(realStoreDir.c_str()));
if (!dir) throw SysError(format("opening directory '%1%'") % realStoreDir);
if (!dir) throw SysError("opening directory '%1%'", realStoreDir);
/* Read the store and immediately delete all paths that
aren't valid. When using --max-freed etc., deleting
@ -868,7 +870,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
/* Clean up the links directory. */
if (options.action == GCOptions::gcDeleteDead || options.action == GCOptions::gcDeleteSpecific) {
printError("deleting unused links...");
printInfo("deleting unused links...");
removeUnusedLinks(state);
}


@ -271,7 +271,7 @@ public:
"listed in 'trusted-public-keys'."};
Setting<StringSet> extraPlatforms{this,
std::string{SYSTEM} == "x86_64-linux" ? StringSet{"i686-linux"} : StringSet{},
std::string{SYSTEM} == "x86_64-linux" && !isWSL1() ? StringSet{"i686-linux"} : StringSet{},
"extra-platforms",
"Additional platforms that can be built on the local system. "
"These may be supported natively (e.g. armv7 on some aarch64 CPUs "


@ -139,7 +139,7 @@ struct LegacySSHStore : public Store
<< cmdAddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(Base::Base16, false);
<< info.narHash.to_string(Base16, false);
writeStorePaths(*this, conn->to, info.references);
conn->to
<< info.registrationTime
@ -256,7 +256,7 @@ struct LegacySSHStore : public Store
conn->to.flush();
for (auto & i : readStorePaths<StorePathSet>(*this, conn->from))
out.insert(i.clone());
out.insert(i);
}
StorePathSet queryValidPaths(const StorePathSet & paths,


@ -74,7 +74,7 @@ static void atomicWrite(const Path & path, const std::string & s)
AutoDelete del(tmp, false);
writeFile(tmp, s);
if (rename(tmp.c_str(), path.c_str()))
throw SysError(format("renaming '%1%' to '%2%'") % tmp % path);
throw SysError("renaming '%1%' to '%2%'", tmp, path);
del.cancel();
}


@ -22,7 +22,7 @@ struct LocalStoreAccessor : public FSAccessor
{
Path storePath = store->toStorePath(path);
if (!store->isValidPath(store->parseStorePath(storePath)))
throw InvalidPath(format("path '%1%' is not a valid store path") % storePath);
throw InvalidPath("path '%1%' is not a valid store path", storePath);
return store->getRealStoreDir() + std::string(path, store->storeDir.size());
}
@ -33,11 +33,11 @@ struct LocalStoreAccessor : public FSAccessor
struct stat st;
if (lstat(realPath.c_str(), &st)) {
if (errno == ENOENT || errno == ENOTDIR) return {Type::tMissing, 0, false};
throw SysError(format("getting status of '%1%'") % path);
throw SysError("getting status of '%1%'", path);
}
if (!S_ISREG(st.st_mode) && !S_ISDIR(st.st_mode) && !S_ISLNK(st.st_mode))
throw Error(format("file '%1%' has unsupported type") % path);
throw Error("file '%1%' has unsupported type", path);
return {
S_ISREG(st.st_mode) ? Type::tRegular :
@ -90,13 +90,13 @@ const string LocalFSStore::drvsLogDir = "drvs";
std::shared_ptr<std::string> LocalFSStore::getBuildLog(const StorePath & path_)
{
auto path = path_.clone();
auto path = path_;
if (!path.isDerivation()) {
try {
auto info = queryPathInfo(path);
if (!info->deriver) return nullptr;
path = info->deriver->clone();
path = *info->deriver;
} catch (InvalidPath &) {
return nullptr;
}


@ -87,18 +87,22 @@ LocalStore::LocalStore(const Params & params)
struct group * gr = getgrnam(settings.buildUsersGroup.get().c_str());
if (!gr)
printError(format("warning: the group '%1%' specified in 'build-users-group' does not exist")
% settings.buildUsersGroup);
logError({
.name = "'build-users-group' not found",
.hint = hintfmt(
"warning: the group '%1%' specified in 'build-users-group' does not exist",
settings.buildUsersGroup)
});
else {
struct stat st;
if (stat(realStoreDir.c_str(), &st))
throw SysError(format("getting attributes of path '%1%'") % realStoreDir);
throw SysError("getting attributes of path '%1%'", realStoreDir);
if (st.st_uid != 0 || st.st_gid != gr->gr_gid || (st.st_mode & ~S_IFMT) != perm) {
if (chown(realStoreDir.c_str(), 0, gr->gr_gid) == -1)
throw SysError(format("changing ownership of path '%1%'") % realStoreDir);
throw SysError("changing ownership of path '%1%'", realStoreDir);
if (chmod(realStoreDir.c_str(), perm) == -1)
throw SysError(format("changing permissions on path '%1%'") % realStoreDir);
throw SysError("changing permissions on path '%1%'", realStoreDir);
}
}
}
@ -109,12 +113,12 @@ LocalStore::LocalStore(const Params & params)
struct stat st;
while (path != "/") {
if (lstat(path.c_str(), &st))
throw SysError(format("getting status of '%1%'") % path);
throw SysError("getting status of '%1%'", path);
if (S_ISLNK(st.st_mode))
throw Error(format(
throw Error(
"the path '%1%' is a symlink; "
"this is not allowed for the Nix store and its parent directories")
% path);
"this is not allowed for the Nix store and its parent directories",
path);
path = dirOf(path);
}
}
@ -147,7 +151,7 @@ LocalStore::LocalStore(const Params & params)
globalLock = openLockFile(globalLockPath.c_str(), true);
if (!lockFile(globalLock.get(), ltRead, false)) {
printError("waiting for the big Nix store lock...");
printInfo("waiting for the big Nix store lock...");
lockFile(globalLock.get(), ltRead, true);
}
@ -155,8 +159,8 @@ LocalStore::LocalStore(const Params & params)
upgrade. */
int curSchema = getSchema();
if (curSchema > nixSchemaVersion)
throw Error(format("current Nix store schema is version %1%, but I only support %2%")
% curSchema % nixSchemaVersion);
throw Error("current Nix store schema is version %1%, but I only support %2%",
curSchema, nixSchemaVersion);
else if (curSchema == 0) { /* new store */
curSchema = nixSchemaVersion;
@ -178,7 +182,7 @@ LocalStore::LocalStore(const Params & params)
"please upgrade Nix to version 1.11 first.");
if (!lockFile(globalLock.get(), ltWrite, false)) {
printError("waiting for exclusive access to the Nix store...");
printInfo("waiting for exclusive access to the Nix store...");
lockFile(globalLock.get(), ltWrite, true);
}
@ -256,7 +260,7 @@ LocalStore::~LocalStore()
}
if (future.valid()) {
printError("waiting for auto-GC to finish on exit...");
printInfo("waiting for auto-GC to finish on exit...");
future.get();
}
@ -284,7 +288,7 @@ int LocalStore::getSchema()
if (pathExists(schemaPath)) {
string s = readFile(schemaPath);
if (!string2Int(s, curSchema))
throw Error(format("'%1%' is corrupt") % schemaPath);
throw Error("'%1%' is corrupt", schemaPath);
}
return curSchema;
}
@ -293,7 +297,7 @@ int LocalStore::getSchema()
void LocalStore::openDB(State & state, bool create)
{
if (access(dbDir.c_str(), R_OK | W_OK))
throw SysError(format("Nix database directory '%1%' is not writable") % dbDir);
throw SysError("Nix database directory '%1%' is not writable", dbDir);
/* Open the Nix database. */
string dbPath = dbDir + "/db.sqlite";
@ -367,7 +371,7 @@ void LocalStore::makeStoreWritable()
throw SysError("setting up a private mount namespace");
if (mount(0, realStoreDir.c_str(), "none", MS_REMOUNT | MS_BIND, 0) == -1)
throw SysError(format("remounting %1% writable") % realStoreDir);
throw SysError("remounting %1% writable", realStoreDir);
}
#endif
}
@ -388,7 +392,7 @@ static void canonicaliseTimestampAndPermissions(const Path & path, const struct
| 0444
| (st.st_mode & S_IXUSR ? 0111 : 0);
if (chmod(path.c_str(), mode) == -1)
throw SysError(format("changing mode of '%1%' to %2$o") % path % mode);
throw SysError("changing mode of '%1%' to %2$o", path, mode);
}
}
@ -406,7 +410,7 @@ static void canonicaliseTimestampAndPermissions(const Path & path, const struct
#else
if (!S_ISLNK(st.st_mode) && utimes(path.c_str(), times) == -1)
#endif
throw SysError(format("changing modification time of '%1%'") % path);
throw SysError("changing modification time of '%1%'", path);
}
}
@ -415,7 +419,7 @@ void canonicaliseTimestampAndPermissions(const Path & path)
{
struct stat st;
if (lstat(path.c_str(), &st))
throw SysError(format("getting attributes of path '%1%'") % path);
throw SysError("getting attributes of path '%1%'", path);
canonicaliseTimestampAndPermissions(path, st);
}
@ -430,17 +434,17 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe
setattrlist() to remove other attributes as well. */
if (lchflags(path.c_str(), 0)) {
if (errno != ENOTSUP)
throw SysError(format("clearing flags of path '%1%'") % path);
throw SysError("clearing flags of path '%1%'", path);
}
#endif
struct stat st;
if (lstat(path.c_str(), &st))
throw SysError(format("getting attributes of path '%1%'") % path);
throw SysError("getting attributes of path '%1%'", path);
/* Really make sure that the path is of a supported type. */
if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode)))
throw Error(format("file '%1%' has an unsupported type") % path);
throw Error("file '%1%' has an unsupported type", path);
#if __linux__
/* Remove extended attributes / ACLs. */
@ -474,7 +478,7 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe
if (fromUid != (uid_t) -1 && st.st_uid != fromUid) {
assert(!S_ISDIR(st.st_mode));
if (inodesSeen.find(Inode(st.st_dev, st.st_ino)) == inodesSeen.end())
throw BuildError(format("invalid ownership on file '%1%'") % path);
throw BuildError("invalid ownership on file '%1%'", path);
mode_t mode = st.st_mode & ~S_IFMT;
assert(S_ISLNK(st.st_mode) || (st.st_uid == geteuid() && (mode == 0444 || mode == 0555) && st.st_mtime == mtimeStore));
return;
@ -498,8 +502,8 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe
if (!S_ISLNK(st.st_mode) &&
chown(path.c_str(), geteuid(), getegid()) == -1)
#endif
throw SysError(format("changing owner of '%1%' to %2%")
% path % geteuid());
throw SysError("changing owner of '%1%' to %2%",
path, geteuid());
}
if (S_ISDIR(st.st_mode)) {
@ -518,11 +522,11 @@ void canonicalisePathMetaData(const Path & path, uid_t fromUid, InodesSeen & ino
be a symlink, since we can't change its ownership. */
struct stat st;
if (lstat(path.c_str(), &st))
throw SysError(format("getting attributes of path '%1%'") % path);
throw SysError("getting attributes of path '%1%'", path);
if (st.st_uid != geteuid()) {
assert(S_ISLNK(st.st_mode));
throw Error(format("wrong ownership of top-level store path '%1%'") % path);
throw Error("wrong ownership of top-level store path '%1%'", path);
}
}
@ -582,7 +586,7 @@ uint64_t LocalStore::addValidPath(State & state,
state.stmtRegisterValidPath.use()
(printStorePath(info.path))
(info.narHash.to_string(Base::Base16))
(info.narHash.to_string(Base16, true))
(info.registrationTime == 0 ? time(0) : info.registrationTime)
(info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
(info.narSize, info.narSize != 0)
@ -597,7 +601,7 @@ uint64_t LocalStore::addValidPath(State & state,
efficiently query whether a path is an output of some
derivation. */
if (info.path.isDerivation()) {
auto drv = readDerivation(*this, realStoreDir + "/" + std::string(info.path.to_string()));
auto drv = readDerivation(info.path);
/* Verify that the output paths in the derivation are correct
(i.e., follow the scheme for computing output paths from
@ -617,7 +621,7 @@ uint64_t LocalStore::addValidPath(State & state,
{
auto state_(Store::state.lock());
state_->pathInfoCache.upsert(storePathToHash(printStorePath(info.path)),
state_->pathInfoCache.upsert(std::string(info.path.hashPart()),
PathInfoCacheValue{ .value = std::make_shared<const ValidPathInfo>(info) });
}
@ -629,7 +633,7 @@ void LocalStore::queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
{
try {
auto info = std::make_shared<ValidPathInfo>(path.clone());
auto info = std::make_shared<ValidPathInfo>(path);
callback(retrySQLite<std::shared_ptr<ValidPathInfo>>([&]() {
auto state(_state.lock());
@ -682,7 +686,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
{
state.stmtUpdatePathInfo.use()
(info.narSize, info.narSize != 0)
(info.narHash.to_string(Base::Base16))
(info.narHash.to_string(Base16, true))
(info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty())
(renderContentAddress(info.ca), (bool) info.ca)
@ -719,7 +723,7 @@ StorePathSet LocalStore::queryValidPaths(const StorePathSet & paths, SubstituteF
{
StorePathSet res;
for (auto & i : paths)
if (isValidPath(i)) res.insert(i.clone());
if (isValidPath(i)) res.insert(i);
return res;
}
@ -787,26 +791,9 @@ StorePathSet LocalStore::queryDerivationOutputs(const StorePath & path)
}
StringSet LocalStore::queryDerivationOutputNames(const StorePath & path)
{
return retrySQLite<StringSet>([&]() {
auto state(_state.lock());
auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use()
(queryValidPathId(*state, path)));
StringSet outputNames;
while (useQueryDerivationOutputs.next())
outputNames.insert(useQueryDerivationOutputs.getStr(0));
return outputNames;
});
}
std::optional<StorePath> LocalStore::queryPathFromHashPart(const std::string & hashPart)
{
if (hashPart.size() != storePathHashLen) throw Error("invalid hash part");
if (hashPart.size() != StorePath::HashLen) throw Error("invalid hash part");
Path prefix = storeDir + "/" + hashPart;
@ -831,7 +818,7 @@ StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths)
StorePathSet remaining;
for (auto & i : paths)
remaining.insert(i.clone());
remaining.insert(i);
StorePathSet res;
@ -845,9 +832,9 @@ StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths)
StorePathSet remaining2;
for (auto & path : remaining)
if (valid.count(path))
res.insert(path.clone());
res.insert(path);
else
remaining2.insert(path.clone());
remaining2.insert(path);
std::swap(remaining, remaining2);
}
@ -869,16 +856,16 @@ void LocalStore::querySubstitutablePathInfos(const StorePathSet & paths,
auto info = sub->queryPathInfo(path);
auto narInfo = std::dynamic_pointer_cast<const NarInfo>(
std::shared_ptr<const ValidPathInfo>(info));
infos.insert_or_assign(path.clone(), SubstitutablePathInfo{
info->deriver ? info->deriver->clone() : std::optional<StorePath>(),
cloneStorePathSet(info->references),
infos.insert_or_assign(path, SubstitutablePathInfo{
info->deriver,
info->references,
narInfo ? narInfo->fileSize : 0,
info->narSize});
} catch (InvalidPath &) {
} catch (SubstituterDisabled &) {
} catch (Error & e) {
if (settings.tryFallback)
printError(e.what());
logError(e.info());
else
throw;
}
@ -910,12 +897,12 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
StorePathSet paths;
for (auto & i : infos) {
assert(i.narHash.type == HashType::SHA256);
assert(i.narHash.type == htSHA256);
if (isValidPath_(*state, i.path))
updatePathInfo(*state, i);
else
addValidPath(*state, i, false);
paths.insert(i.path.clone());
paths.insert(i.path);
}
for (auto & i : infos) {
@ -930,8 +917,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
for (auto & i : infos)
if (i.path.isDerivation()) {
// FIXME: inefficient; we already loaded the derivation in addValidPath().
checkDerivationOutputs(i.path,
readDerivation(*this, realStoreDir + "/" + std::string(i.path.to_string())));
checkDerivationOutputs(i.path, readDerivation(i.path));
}
/* Do a topological sort of the paths. This will throw an
@ -958,7 +944,7 @@ void LocalStore::invalidatePath(State & state, const StorePath & path)
{
auto state_(Store::state.lock());
state_->pathInfoCache.erase(storePathToHash(printStorePath(path)));
state_->pathInfoCache.erase(std::string(path.hashPart()));
}
}
@ -1008,9 +994,9 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
of the NAR. */
std::unique_ptr<AbstractHashSink> hashSink;
if (!info.ca.has_value() || !info.references.count(info.path))
hashSink = std::make_unique<HashSink>(HashType::SHA256);
hashSink = std::make_unique<HashSink>(htSHA256);
else
hashSink = std::make_unique<HashModuloSink>(HashType::SHA256, storePathToHash(printStorePath(info.path)));
hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));
LambdaSource wrapperSource([&](unsigned char * data, size_t len) -> size_t {
size_t n = source.read(data, len);
@ -1024,7 +1010,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
if (hashResult.first != info.narHash)
throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s",
printStorePath(info.path), info.narHash.to_string(), hashResult.first.to_string());
printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
if (hashResult.second != info.narSize)
throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s",
@ -1083,14 +1069,14 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
sha256); otherwise, compute it here. */
HashResult hash;
if (method == FileIngestionMethod::Recursive) {
hash.first = hashAlgo == HashType::SHA256 ? h : hashString(HashType::SHA256, dump);
hash.first = hashAlgo == htSHA256 ? h : hashString(htSHA256, dump);
hash.second = dump.size();
} else
hash = hashPath(HashType::SHA256, realPath);
hash = hashPath(htSHA256, realPath);
optimisePath(realPath); // FIXME: combine with hashPath()
ValidPathInfo info(dstPath.clone());
ValidPathInfo info(dstPath);
info.narHash = hash.first;
info.narSize = hash.second;
info.ca = FileSystemHash { method, h };
@ -1125,7 +1111,7 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
StorePath LocalStore::addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair)
{
auto hash = hashString(HashType::SHA256, s);
auto hash = hashString(htSHA256, s);
auto dstPath = makeTextPath(name, hash, references);
addTempRoot(dstPath);
@ -1149,14 +1135,14 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
StringSink sink;
dumpString(s, sink);
auto narHash = hashString(HashType::SHA256, *sink.s);
auto narHash = hashString(htSHA256, *sink.s);
optimisePath(realPath);
ValidPathInfo info(dstPath.clone());
ValidPathInfo info(dstPath);
info.narHash = narHash;
info.narSize = sink.s->size();
info.references = cloneStorePathSet(references);
info.references = references;
info.ca = TextHash { .hash = hash };
registerValidPath(info);
}
@ -1207,7 +1193,7 @@ void LocalStore::invalidatePathChecked(const StorePath & path)
bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
{
printError(format("reading the Nix store..."));
printInfo(format("reading the Nix store..."));
bool errors = false;
@ -1235,16 +1221,19 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
printInfo("checking link hashes...");
for (auto & link : readDirectory(linksDir)) {
printMsg(Verbosity::Talkative, "checking contents of '%s'", link.name);
printMsg(lvlTalkative, "checking contents of '%s'", link.name);
Path linkPath = linksDir + "/" + link.name;
string hash = hashPath(HashType::SHA256, linkPath).first.to_string(Base::Base32, false);
string hash = hashPath(htSHA256, linkPath).first.to_string(Base32, false);
if (hash != link.name) {
printError(
logError({
.name = "Invalid hash",
.hint = hintfmt(
"link '%s' was modified! expected hash '%s', got '%s'",
linkPath, link.name, hash);
linkPath, link.name, hash)
});
if (repair) {
if (unlink(linkPath.c_str()) == 0)
printError("removed link '%s'", linkPath);
printInfo("removed link '%s'", linkPath);
else
throw SysError("removing corrupt link '%s'", linkPath);
} else {
@ -1255,27 +1244,30 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
printInfo("checking store hashes...");
Hash nullHash(HashType::SHA256);
Hash nullHash(htSHA256);
for (auto & i : validPaths) {
try {
auto info = std::const_pointer_cast<ValidPathInfo>(std::shared_ptr<const ValidPathInfo>(queryPathInfo(i)));
/* Check the content hash (optionally - slow). */
printMsg(Verbosity::Talkative, "checking contents of '%s'", printStorePath(i));
printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i));
std::unique_ptr<AbstractHashSink> hashSink;
if (!info->ca || !info->references.count(info->path))
hashSink = std::make_unique<HashSink>(*info->narHash.type);
else
hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, storePathToHash(printStorePath(info->path)));
hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
dumpPath(Store::toRealPath(i), *hashSink);
auto current = hashSink->finish();
if (info->narHash != nullHash && info->narHash != current.first) {
printError("path '%s' was modified! expected hash '%s', got '%s'",
printStorePath(i), info->narHash.to_string(), current.first.to_string());
logError({
.name = "Invalid hash - path modified",
.hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
});
if (repair) repairPath(i); else errors = true;
} else {
@ -1283,14 +1275,14 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
/* Fill in missing hashes. */
if (info->narHash == nullHash) {
printError("fixing missing hash on '%s'", printStorePath(i));
printInfo("fixing missing hash on '%s'", printStorePath(i));
info->narHash = current.first;
update = true;
}
/* Fill in missing narSize fields (from old stores). */
if (info->narSize == 0) {
printError("updating size field on '%s' to %s", printStorePath(i), current.second);
printInfo("updating size field on '%s' to %s", printStorePath(i), current.second);
info->narSize = current.second;
update = true;
}
@ -1306,7 +1298,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
/* It's possible that the path got GC'ed, so ignore
errors on invalid paths. */
if (isValidPath(i))
printError("error: %s", e.msg());
logError(e.info());
else
warn(e.msg());
errors = true;
@ -1326,7 +1318,10 @@ void LocalStore::verifyPath(const Path & pathS, const StringSet & store,
if (!done.insert(pathS).second) return;
if (!isStorePath(pathS)) {
printError("path '%s' is not in the Nix store", pathS);
logError({
.name = "Nix path not found",
.hint = hintfmt("path '%s' is not in the Nix store", pathS)
});
return;
}
@ -1345,16 +1340,19 @@ void LocalStore::verifyPath(const Path & pathS, const StringSet & store,
}
if (canInvalidate) {
printError("path '%s' disappeared, removing from database...", pathS);
printInfo("path '%s' disappeared, removing from database...", pathS);
auto state(_state.lock());
invalidatePath(*state, path);
} else {
printError("path '%s' disappeared, but it still has valid referrers!", pathS);
logError({
.name = "Missing path with referrers",
.hint = hintfmt("path '%s' disappeared, but it still has valid referrers!", pathS)
});
if (repair)
try {
repairPath(path);
} catch (Error & e) {
warn(e.msg());
logWarning(e.info());
errors = true;
}
else errors = true;
@ -1394,7 +1392,7 @@ static void makeMutable(const Path & path)
AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_NOFOLLOW | O_CLOEXEC);
if (fd == -1) {
if (errno == ELOOP) return; // it's a symlink
throw SysError(format("opening file '%1%'") % path);
throw SysError("opening file '%1%'", path);
}
unsigned int flags = 0, old;
@ -1412,7 +1410,7 @@ static void makeMutable(const Path & path)
void LocalStore::upgradeStore7()
{
if (getuid() != 0) return;
printError("removing immutable bits from the Nix store (this may take a while)...");
printInfo("removing immutable bits from the Nix store (this may take a while)...");
makeMutable(realStoreDir);
}


@ -135,8 +135,6 @@ public:
StorePathSet queryDerivationOutputs(const StorePath & path) override;
StringSet queryDerivationOutputNames(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
StorePathSet querySubstitutablePaths(const StorePathSet & paths) override;
@ -157,7 +155,7 @@ public:
true) or simply the contents of a regular file (if recursive ==
false). */
StorePath addToStoreFromDump(const string & dump, const string & name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256, RepairFlag repair = NoRepair) override;
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override;
StorePath addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair) override;

View file

@ -6,7 +6,7 @@ libstore_DIR := $(d)
libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc)
libstore_LIBS = libutil libnixrust
libstore_LIBS = libutil
libstore_LDFLAGS = $(SQLITE3_LIBS) -lbz2 $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
ifneq ($(OS), FreeBSD)

View file

@ -103,7 +103,7 @@ void Store::computeFSClosure(const StorePath & startPath,
StorePathSet & paths_, bool flipDirection, bool includeOutputs, bool includeDerivers)
{
StorePathSet paths;
paths.insert(startPath.clone());
paths.insert(startPath);
computeFSClosure(paths, paths_, flipDirection, includeOutputs, includeDerivers);
}
@ -112,7 +112,7 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
StorePathSet & willBuild_, StorePathSet & willSubstitute_, StorePathSet & unknown_,
unsigned long long & downloadSize_, unsigned long long & narSize_)
{
Activity act(*logger, Verbosity::Debug, ActivityType::Unknown, "querying info about missing paths");
Activity act(*logger, lvlDebug, actUnknown, "querying info about missing paths");
downloadSize_ = narSize_ = 0;
@ -141,11 +141,11 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
auto mustBuildDrv = [&](const StorePath & drvPath, const Derivation & drv) {
{
auto state(state_.lock());
state->willBuild.insert(drvPath.clone());
state->willBuild.insert(drvPath);
}
for (auto & i : drv.inputDrvs)
pool.enqueue(std::bind(doPath, StorePathWithOutputs(i.first, i.second)));
pool.enqueue(std::bind(doPath, StorePathWithOutputs { i.first, i.second }));
};
auto checkOutput = [&](
@ -157,9 +157,7 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
auto outPath = parseStorePath(outPathS);
SubstitutablePathInfos infos;
StorePathSet paths; // FIXME
paths.insert(outPath.clone());
querySubstitutablePathInfos(paths, infos);
querySubstitutablePathInfos({outPath}, infos);
if (infos.empty()) {
drvState_->lock()->done = true;
@ -170,10 +168,10 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
if (drvState->done) return;
assert(drvState->left);
drvState->left--;
drvState->outPaths.insert(outPath.clone());
drvState->outPaths.insert(outPath);
if (!drvState->left) {
for (auto & path : drvState->outPaths)
pool.enqueue(std::bind(doPath, StorePathWithOutputs(path.clone())));
pool.enqueue(std::bind(doPath, StorePathWithOutputs { path } ));
}
}
}
@ -190,12 +188,12 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
if (!isValidPath(path.path)) {
// FIXME: we could try to substitute the derivation.
auto state(state_.lock());
state->unknown.insert(path.path.clone());
state->unknown.insert(path.path);
return;
}
auto drv = make_ref<Derivation>(derivationFromPath(path.path));
ParsedDerivation parsedDrv(path.path.clone(), *drv);
ParsedDerivation parsedDrv(StorePath(path.path), *drv);
PathSet invalid;
for (auto & j : drv->outputs)
@ -216,13 +214,11 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
if (isValidPath(path.path)) return;
SubstitutablePathInfos infos;
StorePathSet paths; // FIXME
paths.insert(path.path.clone());
querySubstitutablePathInfos(paths, infos);
querySubstitutablePathInfos({path.path}, infos);
if (infos.empty()) {
auto state(state_.lock());
state->unknown.insert(path.path.clone());
state->unknown.insert(path.path);
return;
}
@ -231,13 +227,13 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
{
auto state(state_.lock());
state->willSubstitute.insert(path.path.clone());
state->willSubstitute.insert(path.path);
state->downloadSize += info->second.downloadSize;
state->narSize += info->second.narSize;
}
for (auto & ref : info->second.references)
pool.enqueue(std::bind(doPath, StorePathWithOutputs(ref)));
pool.enqueue(std::bind(doPath, StorePathWithOutputs { ref }));
}
};
@ -260,12 +256,12 @@ StorePaths Store::topoSortPaths(const StorePathSet & paths)
throw BuildError("cycle detected in the references of '%s' from '%s'",
printStorePath(path), printStorePath(*parent));
if (!visited.insert(path.clone()).second) return;
parents.insert(path.clone());
if (!visited.insert(path).second) return;
parents.insert(path);
StorePathSet references;
try {
references = cloneStorePathSet(queryPathInfo(path)->references);
references = queryPathInfo(path)->references;
} catch (InvalidPath &) {
}
@ -275,7 +271,7 @@ StorePaths Store::topoSortPaths(const StorePathSet & paths)
if (i != path && paths.count(i))
dfsVisit(i, &path);
sorted.push_back(path.clone());
sorted.push_back(path);
parents.erase(path);
};
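
Editor's note: topoSortPaths above orders store paths by their references with a depth-first search, using a `parents` set for cycle detection. Here is a small self-contained sketch of the same pattern over a plain string graph; the node names and error text are illustrative, not libstore API.

#include <functional>
#include <iostream>
#include <map>
#include <set>
#include <stdexcept>
#include <string>
#include <vector>

// DFS-based topological sort in the style of Store::topoSortPaths above:
// `parents` is the current DFS stack (for cycle detection), `visited` holds
// the nodes already emitted; references are visited before the referrer.
std::vector<std::string> topoSort(
    const std::set<std::string> & nodes,
    const std::map<std::string, std::set<std::string>> & refs)
{
    std::vector<std::string> sorted;
    std::set<std::string> visited, parents;

    std::function<void(const std::string &, const std::string *)> dfsVisit =
        [&](const std::string & node, const std::string * parent) {
            if (parents.count(node))
                throw std::runtime_error("cycle detected in the references of '" + node + "'"
                    + (parent ? " from '" + *parent + "'" : std::string()));
            if (!visited.insert(node).second) return;
            parents.insert(node);

            auto i = refs.find(node);
            if (i != refs.end())
                for (auto & ref : i->second)
                    if (ref != node && nodes.count(ref))   // skip self-references
                        dfsVisit(ref, &node);

            sorted.push_back(node);                        // dependencies first
            parents.erase(node);
        };

    for (auto & node : nodes) dfsVisit(node, nullptr);
    return sorted;
}

int main()
{
    std::set<std::string> nodes{"app", "lib", "libc"};
    std::map<std::string, std::set<std::string>> refs{
        {"app", {"lib"}}, {"lib", {"libc"}}, {"libc", {}}};

    for (auto & p : topoSort(nodes, refs))
        std::cout << p << "\n";                            // prints: libc lib app
}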

View file

@ -184,7 +184,7 @@ struct NarAccessor : public FSAccessor
auto i = get(path);
if (i.type != FSAccessor::Type::tDirectory)
throw Error(format("path '%1%' inside NAR file is not a directory") % path);
throw Error("path '%1%' inside NAR file is not a directory", path);
StringSet res;
for (auto & child : i.children)
@ -197,7 +197,7 @@ struct NarAccessor : public FSAccessor
{
auto i = get(path);
if (i.type != FSAccessor::Type::tRegular)
throw Error(format("path '%1%' inside NAR file is not a regular file") % path);
throw Error("path '%1%' inside NAR file is not a regular file", path);
if (getNarBytes) return getNarBytes(i.start, i.size);
@ -209,7 +209,7 @@ struct NarAccessor : public FSAccessor
{
auto i = get(path);
if (i.type != FSAccessor::Type::tSymlink)
throw Error(format("path '%1%' inside NAR file is not a symlink") % path);
throw Error("path '%1%' inside NAR file is not a symlink", path);
return i.target;
}
};

View file

@ -189,7 +189,7 @@ public:
return {oInvalid, 0};
auto namePart = queryNAR.getStr(1);
auto narInfo = make_ref<NarInfo>(StorePath::fromBaseName(hashPart + "-" + namePart));
auto narInfo = make_ref<NarInfo>(StorePath(hashPart + "-" + namePart));
narInfo->url = queryNAR.getStr(2);
narInfo->compression = queryNAR.getStr(3);
if (!queryNAR.isNull(4))
@ -198,9 +198,9 @@ public:
narInfo->narHash = Hash(queryNAR.getStr(6));
narInfo->narSize = queryNAR.getInt(7);
for (auto & r : tokenizeString<Strings>(queryNAR.getStr(8), " "))
narInfo->references.insert(StorePath::fromBaseName(r));
narInfo->references.insert(StorePath(r));
if (!queryNAR.isNull(9))
narInfo->deriver = StorePath::fromBaseName(queryNAR.getStr(9));
narInfo->deriver = StorePath(queryNAR.getStr(9));
for (auto & sig : tokenizeString<Strings>(queryNAR.getStr(10), " "))
narInfo->sigs.insert(sig);
narInfo->ca = parseContentAddressOpt(queryNAR.getStr(11));
@ -230,9 +230,9 @@ public:
(std::string(info->path.name()))
(narInfo ? narInfo->url : "", narInfo != 0)
(narInfo ? narInfo->compression : "", narInfo != 0)
(narInfo && narInfo->fileHash ? narInfo->fileHash.to_string() : "", narInfo && narInfo->fileHash)
(narInfo && narInfo->fileHash ? narInfo->fileHash.to_string(Base32, true) : "", narInfo && narInfo->fileHash)
(narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
(info->narHash.to_string())
(info->narHash.to_string(Base32, true))
(info->narSize)
(concatStringsSep(" ", info->shortRefs()))
(info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)

View file

@ -4,10 +4,10 @@
namespace nix {
NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
: ValidPathInfo(StorePath::dummy) // FIXME: hack
: ValidPathInfo(StorePath(StorePath::dummy)) // FIXME: hack
{
auto corrupt = [&]() {
throw Error(format("NAR info file '%1%' is corrupt") % whence);
throw Error("NAR info file '%1%' is corrupt", whence);
};
auto parseHashField = [&](const string & s) {
@ -56,11 +56,11 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
auto refs = tokenizeString<Strings>(value, " ");
if (!references.empty()) corrupt();
for (auto & r : refs)
references.insert(StorePath::fromBaseName(r));
references.insert(StorePath(r));
}
else if (name == "Deriver") {
if (value != "unknown-deriver")
deriver = StorePath::fromBaseName(value);
deriver = StorePath(value);
}
else if (name == "System")
system = value;
@ -87,11 +87,11 @@ std::string NarInfo::to_string(const Store & store) const
res += "URL: " + url + "\n";
assert(compression != "");
res += "Compression: " + compression + "\n";
assert(fileHash.type == HashType::SHA256);
res += "FileHash: " + fileHash.to_string(Base::Base32) + "\n";
assert(fileHash.type == htSHA256);
res += "FileHash: " + fileHash.to_string(Base32, true) + "\n";
res += "FileSize: " + std::to_string(fileSize) + "\n";
assert(narHash.type == HashType::SHA256);
res += "NarHash: " + narHash.to_string(Base::Base32) + "\n";
assert(narHash.type == htSHA256);
res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
res += "NarSize: " + std::to_string(narSize) + "\n";
res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";

View file

@ -19,9 +19,9 @@ static void makeWritable(const Path & path)
{
struct stat st;
if (lstat(path.c_str(), &st))
throw SysError(format("getting attributes of path '%1%'") % path);
throw SysError("getting attributes of path '%1%'", path);
if (chmod(path.c_str(), st.st_mode | S_IWUSR) == -1)
throw SysError(format("changing writability of '%1%'") % path);
throw SysError("changing writability of '%1%'", path);
}
@ -47,7 +47,7 @@ LocalStore::InodeHash LocalStore::loadInodeHash()
InodeHash inodeHash;
AutoCloseDir dir(opendir(linksDir.c_str()));
if (!dir) throw SysError(format("opening directory '%1%'") % linksDir);
if (!dir) throw SysError("opening directory '%1%'", linksDir);
struct dirent * dirent;
while (errno = 0, dirent = readdir(dir.get())) { /* sic */
@ -55,9 +55,9 @@ LocalStore::InodeHash LocalStore::loadInodeHash()
// We don't care if we hit non-hash files, anything goes
inodeHash.insert(dirent->d_ino);
}
if (errno) throw SysError(format("reading directory '%1%'") % linksDir);
if (errno) throw SysError("reading directory '%1%'", linksDir);
printMsg(Verbosity::Talkative, format("loaded %1% hash inodes") % inodeHash.size());
printMsg(lvlTalkative, format("loaded %1% hash inodes") % inodeHash.size());
return inodeHash;
}
@ -68,7 +68,7 @@ Strings LocalStore::readDirectoryIgnoringInodes(const Path & path, const InodeHa
Strings names;
AutoCloseDir dir(opendir(path.c_str()));
if (!dir) throw SysError(format("opening directory '%1%'") % path);
if (!dir) throw SysError("opening directory '%1%'", path);
struct dirent * dirent;
while (errno = 0, dirent = readdir(dir.get())) { /* sic */
@ -83,7 +83,7 @@ Strings LocalStore::readDirectoryIgnoringInodes(const Path & path, const InodeHa
if (name == "." || name == "..") continue;
names.push_back(name);
}
if (errno) throw SysError(format("reading directory '%1%'") % path);
if (errno) throw SysError("reading directory '%1%'", path);
return names;
}
@ -96,7 +96,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
struct stat st;
if (lstat(path.c_str(), &st))
throw SysError(format("getting attributes of path '%1%'") % path);
throw SysError("getting attributes of path '%1%'", path);
#if __APPLE__
/* HFS/macOS has some undocumented security feature disabling hardlinking for
@ -130,7 +130,10 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
NixOS (example: $fontconfig/var/cache being modified). Skip
those files. FIXME: check the modification time. */
if (S_ISREG(st.st_mode) && (st.st_mode & S_IWUSR)) {
printError(format("skipping suspicious writable file '%1%'") % path);
logWarning({
.name = "Suspicious file",
.hint = hintfmt("skipping suspicious writable file '%1%'", path)
});
return;
}
@ -149,11 +152,11 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
Also note that if `path' is a symlink, then we're hashing the
contents of the symlink (i.e. the result of readlink()), not
the contents of the target (which may not even exist). */
Hash hash = hashPath(HashType::SHA256, path).first;
debug(format("'%1%' has hash '%2%'") % path % hash.to_string());
Hash hash = hashPath(htSHA256, path).first;
debug(format("'%1%' has hash '%2%'") % path % hash.to_string(Base32, true));
/* Check if this is a known hash. */
Path linkPath = linksDir + "/" + hash.to_string(Base::Base32, false);
Path linkPath = linksDir + "/" + hash.to_string(Base32, false);
retry:
if (!pathExists(linkPath)) {
@ -186,7 +189,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
current file with a hard link to that file. */
struct stat stLink;
if (lstat(linkPath.c_str(), &stLink))
throw SysError(format("getting attributes of path '%1%'") % linkPath);
throw SysError("getting attributes of path '%1%'", linkPath);
if (st.st_ino == stLink.st_ino) {
debug(format("'%1%' is already linked to '%2%'") % path % linkPath);
@ -194,12 +197,15 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
}
if (st.st_size != stLink.st_size) {
printError(format("removing corrupted link '%1%'") % linkPath);
logWarning({
.name = "Corrupted link",
.hint = hintfmt("removing corrupted link '%1%'", linkPath)
});
unlink(linkPath.c_str());
goto retry;
}
printMsg(Verbosity::Talkative, format("linking '%1%' to '%2%'") % path % linkPath);
printMsg(lvlTalkative, format("linking '%1%' to '%2%'") % path % linkPath);
/* Make the containing directory writable, but only if it's not
the store itself (we don't want or need to mess with its
@ -229,7 +235,10 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
/* Atomically replace the old file with the new hard link. */
if (rename(tempLink.c_str(), path.c_str()) == -1) {
if (unlink(tempLink.c_str()) == -1)
printError(format("unable to unlink '%1%'") % tempLink);
logError({
.name = "Unlink error",
.hint = hintfmt("unable to unlink '%1%'", tempLink)
});
if (errno == EMLINK) {
/* Some filesystems generate too many links on the rename,
rather than on the original link. (Probably it
@ -238,7 +247,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
debug("'%s' has reached maximum number of links", linkPath);
return;
}
throw SysError(format("cannot rename '%1%' to '%2%'") % tempLink % path);
throw SysError("cannot rename '%1%' to '%2%'", tempLink, path);
}
stats.filesLinked++;
@ -246,13 +255,13 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
stats.blocksFreed += st.st_blocks;
if (act)
act->result(ResultType::FileLinked, st.st_size, st.st_blocks);
act->result(resFileLinked, st.st_size, st.st_blocks);
}
void LocalStore::optimiseStore(OptimiseStats & stats)
{
Activity act(*logger, ActivityType::OptimiseStore);
Activity act(*logger, actOptimiseStore);
auto paths = queryAllValidPaths();
InodeHash inodeHash = loadInodeHash();
@ -265,7 +274,7 @@ void LocalStore::optimiseStore(OptimiseStats & stats)
addTempRoot(i);
if (!isValidPath(i)) continue; /* path was GC'ed, probably */
{
Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("optimising path '%s'", printStorePath(i)));
Activity act(*logger, lvlTalkative, actUnknown, fmt("optimising path '%s'", printStorePath(i)));
optimisePath_(&act, stats, realStoreDir + "/" + std::string(i.to_string()), inodeHash);
}
done++;
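
Editor's note: optimisePath_ above deduplicates identical store files by hashing their contents and hard-linking every copy to a single canonical file under a links directory, replacing duplicates via a temporary link name. The standalone sketch below shows the same idea with std::filesystem; the content hash is a toy stand-in (the real code hashes the path's NAR serialisation with SHA-256) and the permission, ownership, and store-specific edge cases are omitted.

#include <filesystem>
#include <fstream>
#include <functional>
#include <iostream>
#include <sstream>
#include <string>

namespace fs = std::filesystem;

// Toy content hash, only for this sketch.
static std::string contentHash(const fs::path & p)
{
    std::ifstream in(p, std::ios::binary);
    std::ostringstream ss;
    ss << in.rdbuf();
    return std::to_string(std::hash<std::string>{}(ss.str()));
}

// Link `path` into `linksDir` keyed by its content hash; if an identical file
// is already there, replace `path` with a hard link to it via a temporary
// name, so the duplicate is swapped out in one rename.
static void optimisePath(const fs::path & path, const fs::path & linksDir)
{
    fs::create_directories(linksDir);
    fs::path linkPath = linksDir / contentHash(path);

    if (!fs::exists(linkPath)) {
        fs::create_hard_link(path, linkPath);       // first copy becomes the canonical one
        return;
    }
    if (fs::equivalent(path, linkPath)) return;     // already linked

    fs::path tmp = path;
    tmp += ".tmp-link";
    fs::create_hard_link(linkPath, tmp);            // link to the canonical copy
    fs::rename(tmp, path);                          // replace the duplicate
    std::cout << "linked " << path << " to " << linkPath << "\n";
}

int main()
{
    fs::path dir = fs::temp_directory_path() / "optimise-demo";
    fs::create_directories(dir);
    std::ofstream(dir / "a.txt") << "same contents";
    std::ofstream(dir / "b.txt") << "same contents";

    optimisePath(dir / "a.txt", dir / ".links");
    optimisePath(dir / "b.txt", dir / ".links");    // b.txt now shares a.txt's inode
}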

View file

@ -4,8 +4,8 @@
namespace nix {
ParsedDerivation::ParsedDerivation(StorePath && drvPath, BasicDerivation & drv)
: drvPath(std::move(drvPath)), drv(drv)
ParsedDerivation::ParsedDerivation(const StorePath & drvPath, BasicDerivation & drv)
: drvPath(drvPath), drv(drv)
{
/* Parse the __json attribute, if any. */
auto jsonAttr = drv.env.find("__json");
@ -117,4 +117,9 @@ bool ParsedDerivation::substitutesAllowed() const
return getBoolAttr("allowSubstitutes", true);
}
bool ParsedDerivation::contentAddressed() const
{
return getBoolAttr("__contentAddressed", false);
}
}

View file

@ -12,7 +12,7 @@ class ParsedDerivation
public:
ParsedDerivation(StorePath && drvPath, BasicDerivation & drv);
ParsedDerivation(const StorePath & drvPath, BasicDerivation & drv);
~ParsedDerivation();
@ -34,6 +34,8 @@ public:
bool willBuildLocally() const;
bool substitutesAllowed() const;
bool contentAddressed() const;
};
}

View file

@ -2,38 +2,38 @@
namespace nix {
extern "C" {
rust::Result<StorePath> ffi_StorePath_new(rust::StringSlice path, rust::StringSlice storeDir);
rust::Result<StorePath> ffi_StorePath_new2(unsigned char hash[20], rust::StringSlice storeDir);
rust::Result<StorePath> ffi_StorePath_fromBaseName(rust::StringSlice baseName);
rust::String ffi_StorePath_to_string(const StorePath & _this);
StorePath ffi_StorePath_clone(const StorePath & _this);
rust::StringSlice ffi_StorePath_name(const StorePath & _this);
MakeError(BadStorePath, Error);
static void checkName(std::string_view path, std::string_view name)
{
if (name.empty())
throw BadStorePath("store path '%s' has an empty name", path);
if (name.size() > 211)
throw BadStorePath("store path '%s' has a name longer than 211 characters", path);
for (auto c : name)
if (!((c >= '0' && c <= '9')
|| (c >= 'a' && c <= 'z')
|| (c >= 'A' && c <= 'Z')
|| c == '+' || c == '-' || c == '.' || c == '_' || c == '?' || c == '='))
throw BadStorePath("store path '%s' contains illegal character '%s'", path, c);
}
StorePath StorePath::make(std::string_view path, std::string_view storeDir)
StorePath::StorePath(std::string_view _baseName)
: baseName(_baseName)
{
return ffi_StorePath_new((rust::StringSlice) path, (rust::StringSlice) storeDir).unwrap();
if (baseName.size() < HashLen + 1)
throw BadStorePath("'%s' is too short to be a valid store path", baseName);
for (auto c : hashPart())
if (c == 'e' || c == 'o' || c == 'u' || c == 't'
|| !((c >= '0' && c <= '9') || (c >= 'a' && c <= 'z')))
throw BadStorePath("store path '%s' contains illegal base-32 character '%s'", baseName, c);
checkName(baseName, name());
}
StorePath StorePath::make(unsigned char hash[20], std::string_view name)
StorePath::StorePath(const Hash & hash, std::string_view _name)
: baseName((hash.to_string(Base32, false) + "-").append(std::string(_name)))
{
return ffi_StorePath_new2(hash, (rust::StringSlice) name).unwrap();
}
StorePath StorePath::fromBaseName(std::string_view baseName)
{
return ffi_StorePath_fromBaseName((rust::StringSlice) baseName).unwrap();
}
rust::String StorePath::to_string() const
{
return ffi_StorePath_to_string(*this);
}
StorePath StorePath::clone() const
{
return ffi_StorePath_clone(*this);
checkName(baseName, name());
}
bool StorePath::isDerivation() const
@ -41,18 +41,14 @@ bool StorePath::isDerivation() const
return hasSuffix(name(), drvExtension);
}
std::string_view StorePath::name() const
{
return ffi_StorePath_name(*this);
}
StorePath StorePath::dummy(
StorePath::make(
(unsigned char *) "xxxxxxxxxxxxxxxxxxxx", "x"));
StorePath StorePath::dummy("ffffffffffffffffffffffffffffffff-x");
StorePath Store::parseStorePath(std::string_view path) const
{
return StorePath::make(path, storeDir);
auto p = canonPath(std::string(path));
if (dirOf(p) != storeDir)
throw BadStorePath("path '%s' is not in the Nix store", p);
return StorePath(baseNameOf(p));
}
std::optional<StorePath> Store::maybeParseStorePath(std::string_view path) const
@ -78,9 +74,7 @@ StorePathSet Store::parseStorePathSet(const PathSet & paths) const
std::string Store::printStorePath(const StorePath & path) const
{
auto s = storeDir + "/";
s += (std::string_view) path.to_string();
return s;
return (storeDir + "/").append(path.to_string());
}
PathSet Store::printStorePathSet(const StorePathSet & paths) const
@ -90,29 +84,6 @@ PathSet Store::printStorePathSet(const StorePathSet & paths) const
return res;
}
StorePathSet cloneStorePathSet(const StorePathSet & paths)
{
StorePathSet res;
for (auto & p : paths)
res.insert(p.clone());
return res;
}
StorePathSet storePathsToSet(const StorePaths & paths)
{
StorePathSet res;
for (auto & p : paths)
res.insert(p.clone());
return res;
}
StorePathSet singleton(const StorePath & path)
{
StorePathSet res;
res.insert(path.clone());
return res;
}
std::pair<std::string_view, StringSet> parsePathWithOutputs(std::string_view s)
{
size_t n = s.find("!");

View file

@ -1,74 +1,60 @@
#pragma once
#include "rust-ffi.hh"
#include "content-address.hh"
#include "types.hh"
namespace nix {
/* See path.rs. */
struct StorePath;
class Store;
struct Hash;
extern "C" {
void ffi_StorePath_drop(void *);
bool ffi_StorePath_less_than(const StorePath & a, const StorePath & b);
bool ffi_StorePath_eq(const StorePath & a, const StorePath & b);
void ffi_StorePath_clone_to(const StorePath & _other, StorePath & _this);
unsigned char * ffi_StorePath_hash_data(const StorePath & p);
}
struct StorePath : rust::Value<3 * sizeof(void *) + 24, ffi_StorePath_drop>
class StorePath
{
std::string baseName;
public:
/* Size of the hash part of store paths, in base-32 characters. */
constexpr static size_t HashLen = 32; // i.e. 160 bits
StorePath() = delete;
static StorePath make(std::string_view path, std::string_view storeDir);
StorePath(std::string_view baseName);
static StorePath make(unsigned char hash[20], std::string_view name);
StorePath(const Hash & hash, std::string_view name);
static StorePath fromBaseName(std::string_view baseName);
rust::String to_string() const;
std::string_view to_string() const
{
return baseName;
}
bool operator < (const StorePath & other) const
{
return ffi_StorePath_less_than(*this, other);
return baseName < other.baseName;
}
bool operator == (const StorePath & other) const
{
return ffi_StorePath_eq(*this, other);
return baseName == other.baseName;
}
bool operator != (const StorePath & other) const
{
return !(*this == other);
return baseName != other.baseName;
}
StorePath(StorePath && that) = default;
StorePath(const StorePath & that)
{
ffi_StorePath_clone_to(that, *this);
}
void operator = (const StorePath & that)
{
(rust::Value<3 * sizeof(void *) + 24, ffi_StorePath_drop>::operator = (that));
ffi_StorePath_clone_to(that, *this);
}
StorePath clone() const;
/* Check whether a file name ends with the extension for
derivations. */
bool isDerivation() const;
std::string_view name() const;
unsigned char * hashData() const
std::string_view name() const
{
return ffi_StorePath_hash_data(*this);
return std::string_view(baseName).substr(HashLen + 1);
}
std::string_view hashPart() const
{
return std::string_view(baseName).substr(0, HashLen);
}
static StorePath dummy;
@ -77,14 +63,6 @@ struct StorePath : rust::Value<3 * sizeof(void *) + 24, ffi_StorePath_drop>
typedef std::set<StorePath> StorePathSet;
typedef std::vector<StorePath> StorePaths;
StorePathSet cloneStorePathSet(const StorePathSet & paths);
StorePathSet storePathsToSet(const StorePaths & paths);
StorePathSet singleton(const StorePath & path);
/* Size of the hash part of store paths, in base-32 characters. */
const size_t storePathHashLen = 32; // i.e. 160 bits
/* Extension of derivations in the Nix store. */
const std::string drvExtension = ".drv";
@ -93,18 +71,6 @@ struct StorePathWithOutputs
StorePath path;
std::set<std::string> outputs;
StorePathWithOutputs(const StorePath & path, const std::set<std::string> & outputs = {})
: path(path.clone()), outputs(outputs)
{ }
StorePathWithOutputs(StorePath && path, std::set<std::string> && outputs)
: path(std::move(path)), outputs(std::move(outputs))
{ }
StorePathWithOutputs(const StorePathWithOutputs & other)
: path(other.path.clone()), outputs(other.outputs)
{ }
std::string to_string(const Store & store) const;
};
@ -117,7 +83,7 @@ namespace std {
template<> struct hash<nix::StorePath> {
std::size_t operator()(const nix::StorePath & path) const noexcept
{
return * (std::size_t *) path.hashData();
return * (std::size_t *) path.to_string().data();
}
};

View file

@ -20,7 +20,7 @@ AutoCloseFD openLockFile(const Path & path, bool create)
fd = open(path.c_str(), O_CLOEXEC | O_RDWR | (create ? O_CREAT : 0), 0600);
if (!fd && (create || errno != ENOENT))
throw SysError(format("opening lock file '%1%'") % path);
throw SysError("opening lock file '%1%'", path);
return fd;
}
@ -51,7 +51,7 @@ bool lockFile(int fd, LockType lockType, bool wait)
while (flock(fd, type) != 0) {
checkInterrupt();
if (errno != EINTR)
throw SysError(format("acquiring/releasing lock"));
throw SysError("acquiring/releasing lock");
else
return false;
}
@ -60,7 +60,7 @@ bool lockFile(int fd, LockType lockType, bool wait)
checkInterrupt();
if (errno == EWOULDBLOCK) return false;
if (errno != EINTR)
throw SysError(format("acquiring/releasing lock"));
throw SysError("acquiring/releasing lock");
}
}
@ -124,7 +124,7 @@ bool PathLocks::lockPaths(const PathSet & paths,
hasn't been unlinked). */
struct stat st;
if (fstat(fd.get(), &st) == -1)
throw SysError(format("statting lock file '%1%'") % lockPath);
throw SysError("statting lock file '%1%'", lockPath);
if (st.st_size != 0)
/* This lock file has been unlinked, so we're holding
a lock on a deleted file. This means that other
@ -160,7 +160,8 @@ void PathLocks::unlock()
if (close(i.first) == -1)
printError(
format("error (ignored): cannot close lock file on '%1%'") % i.second);
"error (ignored): cannot close lock file on '%1%'",
i.second);
debug(format("lock released on '%1%'") % i.second);
}
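
Editor's note: lockFile above wraps flock(2), retrying on EINTR and treating EWOULDBLOCK as "not acquired" in the non-blocking case. The sketch below is a self-contained POSIX version of the same pattern; error handling is reduced to perror, whereas the real code throws SysError and also deals with stale, unlinked lock files.

#include <cerrno>
#include <cstdio>
#include <fcntl.h>
#include <sys/file.h>
#include <unistd.h>

// Try to take `lockType` (LOCK_SH or LOCK_EX) on fd. If `wait` is false,
// return false instead of blocking when someone else holds the lock.
static bool lockFile(int fd, int lockType, bool wait)
{
    int type = wait ? lockType : lockType | LOCK_NB;
    while (flock(fd, type) != 0) {
        if (errno == EINTR) continue;                    // interrupted: retry
        if (!wait && errno == EWOULDBLOCK) return false; // lock is busy
        perror("flock");
        return false;
    }
    return true;
}

int main()
{
    int fd = open("/tmp/demo.lock", O_CLOEXEC | O_RDWR | O_CREAT, 0600);
    if (fd == -1) { perror("open"); return 1; }

    if (lockFile(fd, LOCK_EX, false))
        std::puts("acquired exclusive lock");
    else
        std::puts("lock is held by another process");

    flock(fd, LOCK_UN);  // release (also released automatically on close/exit)
    close(fd);
    return 0;
}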

View file

@ -50,7 +50,7 @@ Generations findGenerations(Path profile, int & curGen)
gen.number = n;
struct stat st;
if (lstat(gen.path.c_str(), &st) != 0)
throw SysError(format("statting '%1%'") % gen.path);
throw SysError("statting '%1%'", gen.path);
gen.creationTime = st.st_mtime;
gens.push_back(gen);
}
@ -117,7 +117,7 @@ Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath)
static void removeFile(const Path & path)
{
if (remove(path.c_str()) == -1)
throw SysError(format("cannot unlink '%1%'") % path);
throw SysError("cannot unlink '%1%'", path);
}
@ -149,7 +149,7 @@ void deleteGenerations(const Path & profile, const std::set<unsigned int> & gens
Generations gens = findGenerations(profile, curGen);
if (gensToDelete.find(curGen) != gensToDelete.end())
throw Error(format("cannot delete current generation of profile %1%'") % profile);
throw Error("cannot delete current generation of profile %1%'", profile);
for (auto & i : gens) {
if (gensToDelete.find(i.number) == gensToDelete.end()) continue;
@ -226,7 +226,7 @@ void deleteGenerationsOlderThan(const Path & profile, const string & timeSpec, b
int days;
if (!string2Int(strDays, days) || days < 1)
throw Error(format("invalid number of days specifier '%1%'") % timeSpec);
throw Error("invalid number of days specifier '%1%'", timeSpec);
time_t oldTime = curTime - days * 24 * 3600;

View file

@ -54,7 +54,7 @@ struct RefScanSink : Sink
string tail;
RefScanSink() : hashSink(HashType::SHA256) { }
RefScanSink() : hashSink(htSHA256) { }
void operator () (const unsigned char * data, size_t len);
};
@ -92,11 +92,11 @@ PathSet scanForReferences(const string & path,
auto baseName = std::string(baseNameOf(i));
string::size_type pos = baseName.find('-');
if (pos == string::npos)
throw Error(format("bad reference '%1%'") % i);
throw Error("bad reference '%1%'", i);
string s = string(baseName, 0, pos);
assert(s.size() == refLength);
assert(backMap.find(s) == backMap.end());
// parseHash(HashType::SHA256, s);
// parseHash(htSHA256, s);
sink.hashes.insert(s);
backMap[s] = i;
}
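
Editor's note: scanForReferences above detects which store paths a build output refers to by searching its contents for the 32-character hash parts of candidate paths, carrying a short tail across chunks so matches that straddle chunk boundaries are not missed. A self-contained sketch of that chunked scan follows; the struct name and example data are illustrative, and the real sink also hashes the stream as it scans.

#include <iostream>
#include <set>
#include <string>
#include <string_view>

constexpr size_t refLength = 32; // length of a store path hash part

// Feed data chunk by chunk; `tail` keeps the last refLength-1 bytes of the
// previous chunk so hashes split across chunk boundaries are still found.
struct RefScanner
{
    std::set<std::string> hashes;   // candidate hash parts to look for
    std::set<std::string> seen;     // hashes actually found
    std::string tail;

    void feed(std::string_view data)
    {
        std::string s = tail + std::string(data);
        for (auto & hash : hashes)
            for (size_t pos = s.find(hash); pos != std::string::npos;
                 pos = s.find(hash, pos + 1))
                seen.insert(hash);
        tail = s.size() > refLength - 1 ? s.substr(s.size() - (refLength - 1)) : s;
    }
};

int main()
{
    RefScanner scanner;
    scanner.hashes = {"ffffffffffffffffffffffffffffffff"};

    // Split the reference across two chunks on purpose.
    scanner.feed("Interpreter: /nix/store/ffffffffffffff");
    scanner.feed("ffffffffffffffffff-bash-4.4/bin/sh\n");

    for (auto & h : scanner.seen)
        std::cout << "found reference to hash " << h << "\n";
}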

View file

@ -19,7 +19,7 @@ RemoteFSAccessor::RemoteFSAccessor(ref<Store> store, const Path & cacheDir)
Path RemoteFSAccessor::makeCacheFile(const Path & storePath, const std::string & ext)
{
assert(cacheDir != "");
return fmt("%s/%s.%s", cacheDir, storePathToHash(storePath), ext);
return fmt("%s/%s.%s", cacheDir, store->parseStorePath(storePath).hashPart(), ext);
}
void RemoteFSAccessor::addToCache(const Path & storePath, const std::string & nar,
@ -51,7 +51,7 @@ std::pair<ref<FSAccessor>, Path> RemoteFSAccessor::fetch(const Path & path_)
std::string restPath = std::string(path, storePath.size());
if (!store->isValidPath(store->parseStorePath(storePath)))
throw InvalidPath(format("path '%1%' is not a valid store path") % storePath);
throw InvalidPath("path '%1%' is not a valid store path", storePath);
auto i = nars.find(storePath);
if (i != nars.end()) return {i->second, restPath};

View file

@ -116,11 +116,11 @@ ref<RemoteStore::Connection> UDSRemoteStore::openConnection()
struct sockaddr_un addr;
addr.sun_family = AF_UNIX;
if (socketPath.size() + 1 >= sizeof(addr.sun_path))
throw Error(format("socket path '%1%' is too long") % socketPath);
throw Error("socket path '%1%' is too long", socketPath);
strcpy(addr.sun_path, socketPath.c_str());
if (::connect(conn->fd.get(), (struct sockaddr *) &addr, sizeof(addr)) == -1)
throw SysError(format("cannot connect to daemon at '%1%'") % socketPath);
throw SysError("cannot connect to daemon at '%1%'", socketPath);
conn->from.fd = conn->fd.get();
conn->to.fd = conn->fd.get();
@ -177,11 +177,11 @@ void RemoteStore::setOptions(Connection & conn)
<< settings.keepFailed
<< settings.keepGoing
<< settings.tryFallback
<< (uint64_t) verbosity
<< verbosity
<< settings.maxBuildJobs
<< settings.maxSilentTime
<< true
<< (uint64_t) (settings.verboseBuild ? Verbosity::Error : Verbosity::Vomit)
<< (settings.verboseBuild ? lvlError : lvlVomit)
<< 0 // obsolete log type
<< 0 /* obsolete print build trace */
<< settings.buildCores
@ -228,7 +228,7 @@ struct ConnectionHandle
~ConnectionHandle()
{
if (!daemonException && std::uncaught_exception()) {
if (!daemonException && std::uncaught_exceptions()) {
handle.markBad();
debug("closing daemon connection because of an exception");
}
@ -268,7 +268,7 @@ StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, Substitute
if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) {
StorePathSet res;
for (auto & i : paths)
if (isValidPath(i)) res.insert(i.clone());
if (isValidPath(i)) res.insert(i);
return res;
} else {
conn->to << wopQueryValidPaths;
@ -296,7 +296,7 @@ StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths)
for (auto & i : paths) {
conn->to << wopHasSubstitutes << printStorePath(i);
conn.processStderr();
if (readInt(conn->from)) res.insert(i.clone());
if (readInt(conn->from)) res.insert(i);
}
return res;
} else {
@ -329,7 +329,7 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathSet & paths,
info.references = readStorePaths<StorePathSet>(*this, conn->from);
info.downloadSize = readLongLong(conn->from);
info.narSize = readLongLong(conn->from);
infos.insert_or_assign(i.clone(), std::move(info));
infos.insert_or_assign(i, std::move(info));
}
} else {
@ -365,17 +365,17 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
} catch (Error & e) {
// Ugly backwards compatibility hack.
if (e.msg().find("is not valid") != std::string::npos)
throw InvalidPath(e.what());
throw InvalidPath(e.info());
throw;
}
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 17) {
bool valid; conn->from >> valid;
if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path));
}
info = std::make_shared<ValidPathInfo>(path.clone());
info = std::make_shared<ValidPathInfo>(StorePath(path));
auto deriver = readString(conn->from);
if (deriver != "") info->deriver = parseStorePath(deriver);
info->narHash = Hash(readString(conn->from), HashType::SHA256);
info->narHash = Hash(readString(conn->from), htSHA256);
info->references = readStorePaths<StorePathSet>(*this, conn->from);
conn->from >> info->registrationTime >> info->narSize;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
@ -396,7 +396,7 @@ void RemoteStore::queryReferrers(const StorePath & path,
conn->to << wopQueryReferrers << printStorePath(path);
conn.processStderr();
for (auto & i : readStorePaths<StorePathSet>(*this, conn->from))
referrers.insert(i.clone());
referrers.insert(i);
}
@ -418,15 +418,6 @@ StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path)
}
PathSet RemoteStore::queryDerivationOutputNames(const StorePath & path)
{
auto conn(getConnection());
conn->to << wopQueryDerivationOutputNames << printStorePath(path);
conn.processStderr();
return readStrings<PathSet>(conn->from);
}
std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string & hashPart)
{
auto conn(getConnection());
@ -471,7 +462,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
conn->to << wopAddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(Base::Base16, false);
<< info.narHash.to_string(Base16, false);
writeStorePaths(*this, conn->to, info.references);
conn->to << info.registrationTime << info.narSize
<< info.ultimate << info.sigs << renderContentAddress(info.ca)
@ -495,7 +486,7 @@ StorePath RemoteStore::addToStore(const string & name, const Path & _srcPath,
conn->to
<< wopAddToStore
<< name
<< ((hashAlgo == HashType::SHA256 && method == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
<< ((hashAlgo == htSHA256 && method == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
<< (method == FileIngestionMethod::Recursive ? 1 : 0)
<< printHashType(hashAlgo);
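
Editor's note: ConnectionHandle's destructor above switches from std::uncaught_exception() to C++17's std::uncaught_exceptions() to decide whether it is being destroyed during stack unwinding, and only then marks the pooled daemon connection as bad. The standalone sketch below shows the general pattern; recording the count at construction is a slight generalisation of the check in the hunk that stays correct even when the guard itself is created during unwinding.

#include <exception>
#include <iostream>
#include <stdexcept>

// RAII guard in the style of ConnectionHandle: on scope exit it distinguishes
// "normal return" from "we are unwinding because of an exception".
class ScopeGuard
{
    int exceptionsAtEntry = std::uncaught_exceptions();
public:
    ~ScopeGuard()
    {
        if (std::uncaught_exceptions() > exceptionsAtEntry)
            std::cout << "unwinding: marking connection as bad\n";
        else
            std::cout << "normal exit: returning connection to the pool\n";
    }
};

void talkToDaemon(bool fail)
{
    ScopeGuard guard;
    if (fail) throw std::runtime_error("protocol error");
    // ... normal request/response traffic would go here ...
}

int main()
{
    talkToDaemon(false);                       // normal exit
    try { talkToDaemon(true); } catch (...) {} // destructor sees the in-flight exception
}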

View file

@ -51,8 +51,6 @@ public:
StorePathSet queryDerivationOutputs(const StorePath & path) override;
StringSet queryDerivationOutputNames(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
StorePathSet querySubstitutablePaths(const StorePathSet & paths) override;
@ -65,7 +63,7 @@ public:
std::shared_ptr<FSAccessor> accessor) override;
StorePath addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override;
StorePath addTextToStore(const string & name, const string & s,

View file

@ -32,8 +32,10 @@ namespace nix {
struct S3Error : public Error
{
Aws::S3::S3Errors err;
S3Error(Aws::S3::S3Errors err, const FormatOrString & fs)
: Error(fs), err(err) { };
template<typename... Args>
S3Error(Aws::S3::S3Errors err, const Args & ... args)
: Error(args...), err(err) { };
};
/* Helper: given an Outcome<R, E>, return R in case of success, or
@ -68,9 +70,9 @@ static void initAWS()
shared.cc), so don't let aws-sdk-cpp override it. */
options.cryptoOptions.initAndCleanupOpenSSL = false;
if (verbosity >= Verbosity::Debug) {
if (verbosity >= lvlDebug) {
options.loggingOptions.logLevel =
verbosity == Verbosity::Debug
verbosity == lvlDebug
? Aws::Utils::Logging::LogLevel::Debug
: Aws::Utils::Logging::LogLevel::Trace;
options.loggingOptions.logger_create_fn = [options]() {
@ -109,7 +111,9 @@ class RetryStrategy : public Aws::Client::DefaultRetryStrategy
auto retry = Aws::Client::DefaultRetryStrategy::ShouldRetry(error, attemptedRetries);
if (retry)
printError("AWS error '%s' (%s), will retry in %d ms",
error.GetExceptionName(), error.GetMessage(), CalculateDelayBeforeNextRetry(error, attemptedRetries));
error.GetExceptionName(),
error.GetMessage(),
CalculateDelayBeforeNextRetry(error, attemptedRetries));
return retry;
}
};
@ -249,7 +253,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
// If bucket listing is disabled, 404s turn into 403s
|| error.GetErrorType() == Aws::S3::S3Errors::ACCESS_DENIED)
return false;
throw Error(format("AWS error fetching '%s': %s") % path % error.GetMessage());
throw Error("AWS error fetching '%s': %s", path, error.GetMessage());
}
return true;
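
Editor's note: the S3Error change above replaces the FormatOrString parameter with a variadic template constructor that forwards its arguments to the base Error, so callers can pass a format string plus arguments directly. A minimal standalone version of the same forwarding pattern is below; the printf-style formatting helper is a stand-in, since nix::Error uses its own hintfmt/boost-style formatting.

#include <cstdio>
#include <stdexcept>
#include <string>
#include <utility>

// printf-style helper, only for this sketch.
template<typename... Args>
std::string fmtString(const char * fmt, Args... args)
{
    int n = std::snprintf(nullptr, 0, fmt, args...);
    if (n < 0) return fmt;
    std::string s(static_cast<size_t>(n) + 1, '\0');
    std::snprintf(s.data(), s.size(), fmt, args...);
    s.resize(static_cast<size_t>(n));
    return s;
}

struct Error : std::runtime_error
{
    template<typename... Args>
    explicit Error(const char * fmt, Args &&... args)
        : std::runtime_error(fmtString(fmt, std::forward<Args>(args)...)) { }
};

// Derived error that carries an extra error code and forwards the rest of its
// arguments to the base, like the new S3Error constructor.
struct S3LikeError : Error
{
    int code;

    template<typename... Args>
    S3LikeError(int code, Args &&... args)
        : Error(std::forward<Args>(args)...), code(code) { }
};

int main()
{
    try {
        throw S3LikeError(404, "AWS error fetching '%s': %s", "nar/123.nar.xz", "NoSuchKey");
    } catch (const S3LikeError & e) {
        std::printf("code %d: %s\n", e.code, e.what());
    }
}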

View file

@ -29,7 +29,7 @@ SQLite::SQLite(const Path & path, bool create)
{
if (sqlite3_open_v2(path.c_str(), &db,
SQLITE_OPEN_READWRITE | (create ? SQLITE_OPEN_CREATE : 0), 0) != SQLITE_OK)
throw Error(format("cannot open SQLite database '%s'") % path);
throw Error("cannot open SQLite database '%s'", path);
if (sqlite3_busy_timeout(db, 60 * 60 * 1000) != SQLITE_OK)
throwSQLiteError(db, "setting timeout");
@ -204,7 +204,10 @@ void handleSQLiteBusy(const SQLiteBusy & e)
if (now > lastWarned + 10) {
lastWarned = now;
printError("warning: %s", e.what());
logWarning({
.name = "Sqlite busy",
.hint = hintfmt(e.what())
});
}
/* Sleep for a while since retrying the transaction right away

View file

@ -3,7 +3,7 @@
#include <functional>
#include <string>
#include "types.hh"
#include "error.hh"
struct sqlite3;
struct sqlite3_stmt;

View file

@ -58,7 +58,7 @@ std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string
addCommonSSHOpts(args);
if (socketPath != "")
args.insert(args.end(), {"-S", socketPath});
if (verbosity >= Verbosity::Chatty)
if (verbosity >= lvlChatty)
args.push_back("-v");
}
@ -110,7 +110,7 @@ Path SSHMaster::startMaster()
, "-o", "LocalCommand=echo started"
, "-o", "PermitLocalCommand=yes"
};
if (verbosity >= Verbosity::Chatty)
if (verbosity >= lvlChatty)
args.push_back("-v");
addCommonSSHOpts(args);
execvp(args.begin()->c_str(), stringsToCharPtrs(args).data());

View file

@ -23,7 +23,7 @@ bool Store::isInStore(const Path & path) const
Path Store::toStorePath(const Path & path) const
{
if (!isInStore(path))
throw Error(format("path '%1%' is not in the Nix store") % path);
throw Error("path '%1%' is not in the Nix store", path);
Path::size_type slash = path.find('/', storeDir.size() + 1);
if (slash == Path::npos)
return path;
@ -55,21 +55,13 @@ StorePath Store::followLinksToStorePath(std::string_view path) const
StorePathWithOutputs Store::followLinksToStorePathWithOutputs(std::string_view path) const
{
auto [path2, outputs] = nix::parsePathWithOutputs(path);
return StorePathWithOutputs(followLinksToStorePath(path2), std::move(outputs));
}
string storePathToHash(const Path & path)
{
auto base = baseNameOf(path);
assert(base.size() >= storePathHashLen);
return string(base, 0, storePathHashLen);
return StorePathWithOutputs { followLinksToStorePath(path2), std::move(outputs) };
}
/* Store paths have the following form:
<store>/<h>-<name>
<realized-path> = <store>/<h>-<name>
where
@ -93,11 +85,14 @@ string storePathToHash(const Path & path)
<type> = one of:
"text:<r1>:<r2>:...<rN>"
for plain text files written to the store using
addTextToStore(); <r1> ... <rN> are the references of the
path.
"source"
addTextToStore(); <r1> ... <rN> are the store paths referenced
by this path, in the form described by <realized-path>
"source:<r1>:<r2>:...:<rN>:self"
for paths copied to the store using addToStore() when recursive
= true and hashAlgo = "sha256"
= true and hashAlgo = "sha256". Just like in the text case, we
can have the store paths referenced by the path.
Additionally, we can have an optional :self label to denote self
reference.
"output:<id>"
for either the outputs created by derivations, OR paths copied
to the store using addToStore() with recursive != true or
@ -125,6 +120,12 @@ string storePathToHash(const Path & path)
the contents of the path (or expected contents of the
path for fixed-output derivations)
Note that since a derivation output always has type "output", while
something added by addToStore can have type "output" or "source"
depending on the hash, the same input can be hashed differently
depending on whether it was added via addToStore or built by a
derivation, in the sha256 recursive case.
It would have been nicer to handle fixed-output derivations under
"source", e.g. have something like "source:<rec><algo>", but we're
stuck with this for now...
@ -142,9 +143,9 @@ StorePath Store::makeStorePath(const string & type,
const Hash & hash, std::string_view name) const
{
/* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
string s = type + ":" + hash.to_string(Base::Base16) + ":" + storeDir + ":" + std::string(name);
auto h = compressHash(hashString(HashType::SHA256, s), 20);
return StorePath::make(h.hash, name);
string s = type + ":" + hash.to_string(Base16, true) + ":" + storeDir + ":" + std::string(name);
auto h = compressHash(hashString(htSHA256, s), 20);
return StorePath(h, name);
}
@ -178,13 +179,16 @@ StorePath Store::makeFixedOutputPath(
const StorePathSet & references,
bool hasSelfReference) const
{
if (hash.type == HashType::SHA256 && method == FileIngestionMethod::Recursive) {
if (hash.type == htSHA256 && method == FileIngestionMethod::Recursive) {
return makeStorePath(makeType(*this, "source", references, hasSelfReference), hash, name);
} else {
assert(references.empty());
return makeStorePath("output:out", hashString(HashType::SHA256,
"fixed:out:" + makeFileIngestionPrefix(method) +
hash.to_string(Base::Base16) + ":"), name);
return makeStorePath("output:out",
hashString(htSHA256,
"fixed:out:"
+ makeFileIngestionPrefix(method)
+ hash.to_string(Base16, true) + ":"),
name);
}
}
@ -192,7 +196,7 @@ StorePath Store::makeFixedOutputPath(
StorePath Store::makeTextPath(std::string_view name, const Hash & hash,
const StorePathSet & references) const
{
assert(hash.type == HashType::SHA256);
assert(hash.type == htSHA256);
/* Stuff the references (if any) into the type. This is a bit
hacky, but we can't put them in `s' since that would be
ambiguous. */
@ -213,7 +217,7 @@ std::pair<StorePath, Hash> Store::computeStorePathForPath(std::string_view name,
StorePath Store::computeStorePathForText(const string & name, const string & s,
const StorePathSet & references) const
{
return makeTextPath(name, hashString(HashType::SHA256, s), references);
return makeTextPath(name, hashString(htSHA256, s), references);
}
@ -240,7 +244,7 @@ bool Store::PathInfoCacheValue::isKnownNow()
bool Store::isValidPath(const StorePath & storePath)
{
auto hashPart = storePathToHash(printStorePath(storePath));
std::string hashPart(storePath.hashPart());
{
auto state_(state.lock());
@ -308,7 +312,7 @@ void Store::queryPathInfo(const StorePath & storePath,
std::string hashPart;
try {
hashPart = storePathToHash(printStorePath(storePath));
hashPart = storePath.hashPart();
{
auto res = state.lock()->pathInfoCache.get(hashPart);
@ -426,7 +430,7 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
auto info = queryPathInfo(i);
if (showHash) {
s += info->narHash.to_string(Base::Base16, false) + "\n";
s += info->narHash.to_string(Base16, false) + "\n";
s += (format("%1%\n") % info->narSize).str();
}
@ -458,7 +462,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
auto info = queryPathInfo(storePath);
jsonPath
.attr("narHash", info->narHash.to_string(hashBase))
.attr("narHash", info->narHash.to_string(hashBase, true))
.attr("narSize", info->narSize);
{
@ -501,7 +505,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
if (!narInfo->url.empty())
jsonPath.attr("url", narInfo->url);
if (narInfo->fileHash)
jsonPath.attr("downloadHash", narInfo->fileHash.to_string());
jsonPath.attr("downloadHash", narInfo->fileHash.to_string(Base32, true));
if (narInfo->fileSize)
jsonPath.attr("downloadSize", narInfo->fileSize);
if (showClosureSize)
@ -550,7 +554,7 @@ void Store::buildPaths(const std::vector<StorePathWithOutputs> & paths, BuildMod
for (auto & path : paths) {
if (path.path.isDerivation())
unsupported("buildPaths");
paths2.insert(path.path.clone());
paths2.insert(path.path);
}
if (queryValidPaths(paths2).size() != paths2.size())
@ -564,7 +568,7 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
auto srcUri = srcStore->getUri();
auto dstUri = dstStore->getUri();
Activity act(*logger, Verbosity::Info, ActivityType::CopyPath,
Activity act(*logger, lvlInfo, actCopyPath,
srcUri == "local" || srcUri == "daemon"
? fmt("copying path '%s' to '%s'", srcStore->printStorePath(storePath), dstUri)
: dstUri == "local" || dstUri == "daemon"
@ -581,7 +585,7 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
StringSink sink;
srcStore->narFromPath({storePath}, sink);
auto info2 = make_ref<ValidPathInfo>(*info);
info2->narHash = hashString(HashType::SHA256, *sink.s);
info2->narHash = hashString(htSHA256, *sink.s);
if (!info->narSize) info2->narSize = sink.s->size();
if (info->ultimate) info2->ultimate = false;
info = info2;
@ -623,7 +627,7 @@ void copyPaths(ref<Store> srcStore, ref<Store> dstStore, const StorePathSet & st
if (missing.empty()) return;
Activity act(*logger, Verbosity::Info, ActivityType::CopyPaths, fmt("copying %d paths", missing.size()));
Activity act(*logger, lvlInfo, actCopyPaths, fmt("copying %d paths", missing.size()));
std::atomic<size_t> nrDone{0};
std::atomic<size_t> nrFailed{0};
@ -649,7 +653,7 @@ void copyPaths(ref<Store> srcStore, ref<Store> dstStore, const StorePathSet & st
auto info = srcStore->queryPathInfo(srcStore->parseStorePath(storePath));
bytesExpected += info->narSize;
act.setExpected(ActivityType::CopyPath, bytesExpected);
act.setExpected(actCopyPath, bytesExpected);
return srcStore->printStorePathSet(info->references);
},
@ -668,7 +672,7 @@ void copyPaths(ref<Store> srcStore, ref<Store> dstStore, const StorePathSet & st
nrFailed++;
if (!settings.keepGoing)
throw e;
logger->log(Verbosity::Error, fmt("could not copy %s: %s", storePathS, e.what()));
logger->log(lvlError, fmt("could not copy %s: %s", storePathS, e.what()));
showProgress();
return;
}
@ -699,7 +703,7 @@ std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istre
if (hashGiven) {
string s;
getline(str, s);
info.narHash = Hash(s, HashType::SHA256);
info.narHash = Hash(s, htSHA256);
getline(str, s);
if (!string2Int(s, info.narSize)) throw Error("number expected");
}
@ -742,7 +746,7 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
store.printStorePath(path));
return
"1;" + store.printStorePath(path) + ";"
+ narHash.to_string(Base::Base32) + ";"
+ narHash.to_string(Base32, true) + ";"
+ std::to_string(narSize) + ";"
+ concatStringsSep(",", store.printStorePathSet(references));
}
@ -766,7 +770,7 @@ bool ValidPathInfo::isContentAddressed(const Store & store) const
return store.makeTextPath(path.name(), th.hash, references);
},
[&](FileSystemHash fsh) {
auto refs = cloneStorePathSet(references);
auto refs = references;
bool hasSelfReference = false;
if (refs.count(path)) {
hasSelfReference = true;
@ -906,7 +910,7 @@ std::list<ref<Store>> getDefaultSubstituters()
try {
stores.push_back(openStore(uri));
} catch (Error & e) {
printError("warning: %s", e.what());
logWarning(e.info());
}
};
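
Editor's note: makeStorePath above builds a fingerprint of the form "<type>:sha256:<inner-hash>:<storeDir>:<name>" (the comment in the hunk gives "source:sha256:1abc...:/nix/store:foo.tar.gz" as an example), hashes it with SHA-256, and folds the result down to 160 bits that become the path's hash part. The sketch below reproduces that fingerprint-hash-fold pipeline with OpenSSL; the cyclic XOR fold is my reading of compressHash and the hex rendering at the end is a simplification (Nix renders the 20 bytes in its own reversed base-32 alphabet), so the output is illustrative rather than byte-identical to a real store path.

#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

#include <openssl/sha.h>   // link with -lcrypto

// Fold a digest down to `outLen` bytes by XOR-ing its bytes cyclically
// (assumed to match compressHash; treat this as an approximation).
static std::vector<unsigned char> fold(const unsigned char * in, size_t inLen, size_t outLen)
{
    std::vector<unsigned char> out(outLen, 0);
    for (size_t i = 0; i < inLen; ++i)
        out[i % outLen] ^= in[i];
    return out;
}

static std::string toHex(const unsigned char * data, size_t len)
{
    std::ostringstream ss;
    for (size_t i = 0; i < len; ++i)
        ss << std::hex << std::setw(2) << std::setfill('0') << (int) data[i];
    return ss.str();
}

int main()
{
    // Inputs corresponding to makeStorePath's parameters (values are made up).
    std::string type = "source";
    std::string storeDir = "/nix/store";
    std::string name = "hello-2.10";

    std::string contents = "contents of the thing being added";    // stand-in input
    unsigned char inner[SHA256_DIGEST_LENGTH];
    SHA256((const unsigned char *) contents.data(), contents.size(), inner);

    // "<type>:sha256:<inner-hash>:<storeDir>:<name>"
    std::string fingerprint =
        type + ":sha256:" + toHex(inner, sizeof(inner)) + ":" + storeDir + ":" + name;

    unsigned char digest[SHA256_DIGEST_LENGTH];
    SHA256((const unsigned char *) fingerprint.data(), fingerprint.size(), digest);

    auto folded = fold(digest, sizeof(digest), 20);                 // 160 bits -> hash part

    // Hex is used here only to keep the sketch short; Nix uses its base-32.
    std::cout << storeDir << "/" << toHex(folded.data(), folded.size()) << "-" << name << "\n";
}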

View file

@ -178,9 +178,10 @@ struct ValidPathInfo
Strings shortRefs() const;
ValidPathInfo(const ValidPathInfo & other) = default;
ValidPathInfo(StorePath && path) : path(std::move(path)) { };
ValidPathInfo(const StorePath & path) : path(path) { };
ValidPathInfo(const ValidPathInfo & other) = default;
virtual ~ValidPathInfo() { }
};
@ -349,7 +350,7 @@ public:
path and the cryptographic hash of the contents of srcPath. */
std::pair<StorePath, Hash> computeStorePathForPath(std::string_view name,
const Path & srcPath, FileIngestionMethod method = FileIngestionMethod::Recursive,
HashType hashAlgo = HashType::SHA256, PathFilter & filter = defaultPathFilter) const;
HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter) const;
/* Preparatory part of addTextToStore().
@ -420,10 +421,6 @@ public:
virtual StorePathSet queryDerivationOutputs(const StorePath & path)
{ unsupported("queryDerivationOutputs"); }
/* Query the output names of the derivation denoted by `path'. */
virtual StringSet queryDerivationOutputNames(const StorePath & path)
{ unsupported("queryDerivationOutputNames"); }
/* Query the full store path given the hash part of a valid store
path, or empty if the path doesn't exist. */
virtual std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) = 0;
@ -447,12 +444,12 @@ public:
The function object `filter' can be used to exclude files (see
libutil/archive.hh). */
virtual StorePath addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) = 0;
// FIXME: remove?
virtual StorePath addToStoreFromDump(const string & dump, const string & name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256, RepairFlag repair = NoRepair)
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
{
throw Error("addToStoreFromDump() is not supported by this store");
}
@ -546,7 +543,7 @@ public:
each path is included. */
void pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & storePaths,
bool includeImpureInfo, bool showClosureSize,
Base hashBase = Base::Base32,
Base hashBase = Base32,
AllowInvalidFlag allowInvalid = DisallowInvalid);
/* Return the size of the closure of the specified path, that is,
@ -577,6 +574,9 @@ public:
ensurePath(). */
Derivation derivationFromPath(const StorePath & drvPath);
/* Read a derivation (which must already be valid). */
Derivation readDerivation(const StorePath & drvPath);
/* Place in `out' the set of all store paths in the file system
closure of `storePath'; that is, all paths than can be directly
or indirectly reached from it. `out' is not cleared. If
@ -722,10 +722,6 @@ public:
};
/* Extract the hash part of the given store path. */
string storePathToHash(const Path & path);
/* Copy a path from one store to another. */
void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
const StorePath & storePath, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs);

View file

@ -36,7 +36,7 @@ typedef enum {
wopClearFailedPaths = 25,
wopQueryPathInfo = 26,
wopImportPaths = 27, // obsolete
wopQueryDerivationOutputNames = 28,
wopQueryDerivationOutputNames = 28, // obsolete
wopQueryPathFromHashPart = 29,
wopQuerySubstitutablePathInfos = 30,
wopQueryValidPaths = 31,

Some files were not shown because too many files have changed in this diff.