Merge branch 'hash-always-has-type' of github.com:obsidiansystems/nix into better-ca-parse-errors

commit a83566e5bc
Author: John Ericson
Date: 2020-06-29 18:40:34 +00:00
43 changed files with 320 additions and 217 deletions
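The common thread in the hunks below is that Hash now always carries a concrete HashType (its 'type' field is no longer std::optional<HashType>), while hashes that may legitimately be unknown, such as ValidPathInfo::narHash and NarInfo::fileHash, become std::optional<Hash>. A minimal sketch of the resulting caller pattern, assuming the libstore/libutil headers from this tree (renderNarHash itself is a hypothetical helper, not part of the change):

// Sketch only; assumes "store-api.hh" and "hash.hh" from this tree.
// renderNarHash is a hypothetical helper used for illustration.
#include "store-api.hh"
#include "hash.hh"

using namespace nix;

std::string renderNarHash(const ValidPathInfo & info)
{
    // narHash is now std::optional<Hash>, so guard it before use and
    // dereference with '->' instead of '.', as the hunks below do.
    if (!info.narHash) return "";
    return info.narHash->to_string(Base32, true);
}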

.github/dependabot.yml (vendored; new file, 6 lines added)
View file

@@ -0,0 +1,6 @@
+version: 2
+updates:
+- package-ecosystem: "github-actions"
+  directory: "/"
+  schedule:
+    interval: "weekly"

View file

@@ -10,5 +10,5 @@ jobs:
     runs-on: ${{ matrix.os }}
     steps:
     - uses: actions/checkout@v2
-    - uses: cachix/install-nix-action@v8
+    - uses: cachix/install-nix-action@v10
     - run: nix-build release.nix --arg nix '{ outPath = ./.; revCount = 123; shortRev = "abcdefgh"; }' --arg systems '[ builtins.currentSystem ]' -A installerScript -A perlBindings

View file

@@ -97,7 +97,7 @@ $ rm -rf /nix
 installation on your system:
 </para>
-<screen>sh &lt;(curl https://nixos.org/nix/install) --daemon</screen>
+<screen>sh &lt;(curl -L https://nixos.org/nix/install) --daemon</screen>
 <para>
 The multi-user installation of Nix will create build users between
@@ -178,7 +178,7 @@ sudo rm /Library/LaunchDaemons/org.nixos.nix-daemon.plist
 is a bit of a misnomer). To use this approach, just install Nix with:
 </para>
-<screen>$ sh &lt;(curl https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume</screen>
+<screen>$ sh &lt;(curl -L https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume</screen>
 <para>
 If you don't like the sound of this, you'll want to weigh the
@@ -429,7 +429,7 @@ LABEL=Nix\040Store /nix apfs rw,nobrowse
 NixOS.org installation script:
 <screen>
-sh &lt;(curl https://nixos.org/nix/install)
+sh &lt;(curl -L https://nixos.org/nix/install)
 </screen>
 </para>

View file

@@ -80,7 +80,7 @@ SV * queryReferences(char * path)
 SV * queryPathHash(char * path)
 PPCODE:
 try {
-auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
+auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash->to_string(Base32, true);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());
@@ -106,7 +106,7 @@ SV * queryPathInfo(char * path, int base32)
 XPUSHs(&PL_sv_undef);
 else
 XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
-auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
+auto s = info->narHash->to_string(base32 ? Base32 : Base16, true);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 mXPUSHi(info->registrationTime);
 mXPUSHi(info->narSize);

View file

@@ -526,7 +526,7 @@ This script is going to call sudo a lot. Normally, it would show you
 exactly what commands it is running and why. However, the script is
 run in a headless fashion, like this:
-$ curl https://nixos.org/nix/install | sh
+$ curl -L https://nixos.org/nix/install | sh
 or maybe in a CI pipeline. Because of that, we're going to skip the
 verbose output in the interest of brevity.
@@ -534,7 +534,7 @@ verbose output in the interest of brevity.
 If you would like to
 see the output, try like this:
-$ curl -o install-nix https://nixos.org/nix/install
+$ curl -L -o install-nix https://nixos.org/nix/install
 $ sh ./install-nix
 EOF

View file

@@ -113,7 +113,7 @@ if [ "$(uname -s)" = "Darwin" ]; then
 (
 echo ""
 echo "Installing on macOS >=10.15 requires relocating the store to an apfs volume."
-echo "Use sh <(curl https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume or run the preparation steps manually."
+echo "Use sh <(curl -L https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume or run the preparation steps manually."
 echo "See https://nixos.org/nix/manual/#sect-macos-installation"
 echo ""
 ) >&2

View file

@@ -1122,7 +1122,7 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
 static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_,
-Value * filterFun, FileIngestionMethod method, const Hash & expectedHash, Value & v)
+Value * filterFun, FileIngestionMethod method, const std::optional<Hash> expectedHash, Value & v)
 {
 const auto path = evalSettings.pureEval && expectedHash ?
 path_ :
@@ -1153,7 +1153,7 @@ static void addPath(EvalState & state, const Pos & pos, const string & name, con
 std::optional<StorePath> expectedStorePath;
 if (expectedHash)
-expectedStorePath = state.store->makeFixedOutputPath(method, expectedHash, name);
+expectedStorePath = state.store->makeFixedOutputPath(method, *expectedHash, name);
 Path dstPath;
 if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
 dstPath = state.store->printStorePath(settings.readOnlyMode
@@ -1187,7 +1187,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
 .nixCode = NixCode { .errPos = pos }
 });
-addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, Hash(), v);
+addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v);
 }
 static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1197,7 +1197,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
 string name;
 Value * filterFun = nullptr;
 auto method = FileIngestionMethod::Recursive;
-Hash expectedHash;
+Hash expectedHash(htSHA256);
 for (auto & attr : *args[0]->attrs) {
 const string & n(attr.name);

View file

@@ -23,7 +23,7 @@ void emitTreeAttrs(
 assert(tree.info.narHash);
 mkString(*state.allocAttr(v, state.symbols.create("narHash")),
-tree.info.narHash.to_string(SRI, true));
+tree.info.narHash->to_string(SRI, true));
 if (input->getRev()) {
 mkString(*state.allocAttr(v, state.symbols.create("rev")), input->getRev()->gitRev());
@@ -147,7 +147,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
 : hashFile(htSHA256, path);
 if (hash != *expectedHash)
 throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
-*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
+*url, expectedHash->to_string(Base32, true), hash->to_string(Base32, true));
 }
 if (state.allowedPaths)

View file

@@ -8,7 +8,7 @@ namespace nix::fetchers {
 StorePath TreeInfo::computeStorePath(Store & store) const
 {
 assert(narHash);
-return store.makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "source");
+return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, "source");
 }
 }

View file

@@ -11,7 +11,7 @@ namespace nix::fetchers {
 struct TreeInfo
 {
-Hash narHash;
+std::optional<Hash> narHash;
 std::optional<uint64_t> revCount;
 std::optional<time_t> lastModified;

View file

@@ -181,7 +181,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
 ((1.0 - (double) narCompressed->size() / nar->size()) * 100.0),
 duration);
-narInfo->url = "nar/" + narInfo->fileHash.to_string(Base32, false) + ".nar"
+narInfo->url = "nar/" + narInfo->fileHash->to_string(Base32, false) + ".nar"
 + (compression == "xz" ? ".xz" :
 compression == "bzip2" ? ".bz2" :
 compression == "br" ? ".br" :
@@ -338,7 +338,7 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
 method for very large paths, but `copyPath' is mainly used for
 small files. */
 StringSink sink;
-Hash h;
+std::optional<Hash> h;
 if (method == FileIngestionMethod::Recursive) {
 dumpPath(srcPath, sink, filter);
 h = hashString(hashAlgo, *sink.s);
@@ -348,7 +348,7 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
 h = hashString(hashAlgo, s);
 }
-ValidPathInfo info(makeFixedOutputPath(method, h, name));
+ValidPathInfo info(makeFixedOutputPath(method, *h, name));
 auto source = StringSource { *sink.s };
 addToStore(info, source, repair, CheckSigs, nullptr);

View file

@@ -1950,8 +1950,11 @@ void linkOrCopy(const Path & from, const Path & to)
 /* Hard-linking fails if we exceed the maximum link count on a
 file (e.g. 32000 of ext3), which is quite possible after a
 'nix-store --optimise'. FIXME: actually, why don't we just
-bind-mount in this case? */
-if (errno != EMLINK)
+bind-mount in this case?
+
+It can also fail with EPERM in BeegFS v7 and earlier versions
+which don't allow hard-links to other directories */
+if (errno != EMLINK && errno != EPERM)
 throw SysError("linking '%s' to '%s'", to, from);
 copyPath(from, to);
 }
@@ -3730,8 +3733,8 @@ void DerivationGoal::registerOutputs()
 /* Check the hash. In hash mode, move the path produced by
 the derivation to its content-addressed location. */
 Hash h2 = i.second.hash->method == FileIngestionMethod::Recursive
-? hashPath(*i.second.hash->hash.type, actualPath).first
-: hashFile(*i.second.hash->hash.type, actualPath);
+? hashPath(i.second.hash->hash.type, actualPath).first
+: hashFile(i.second.hash->hash.type, actualPath);
 auto dest = worker.store.makeFixedOutputPath(i.second.hash->method, h2, i.second.path.name());
@@ -3780,8 +3783,10 @@
 time. The hash is stored in the database so that we can
 verify later on whether nobody has messed with the store. */
 debug("scanning for references inside '%1%'", path);
-HashResult hash;
-auto references = worker.store.parseStorePathSet(scanForReferences(actualPath, worker.store.printStorePathSet(referenceablePaths), hash));
+// HashResult hash;
+auto pathSetAndHash = scanForReferences(actualPath, worker.store.printStorePathSet(referenceablePaths));
+auto references = worker.store.parseStorePathSet(pathSetAndHash.first);
+HashResult hash = pathSetAndHash.second;
 if (buildMode == bmCheck) {
 if (!worker.store.isValidPath(worker.store.parseStorePath(path))) continue;
@@ -5003,7 +5008,7 @@ bool Worker::pathContentsGood(const StorePath & path)
 if (!pathExists(store.printStorePath(path)))
 res = false;
 else {
-HashResult current = hashPath(*info->narHash.type, store.printStorePath(path));
+HashResult current = hashPath(info->narHash->type, store.printStorePath(path));
 Hash nullHash(htSHA256);
 res = info->narHash == nullHash || info->narHash == current.first;
 }

View file

@@ -58,14 +58,17 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
 }
 };
+/* We always have one output, and if it's a fixed-output derivation (as
+checked below) it must be the only output */
+auto & output = drv.outputs.begin()->second;
 /* Try the hashed mirrors first. */
-if (getAttr("outputHashMode") == "flat")
+if (output.hash && output.hash->method == FileIngestionMethod::Flat)
 for (auto hashedMirror : settings.hashedMirrors.get())
 try {
 if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
-auto ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
-auto h = Hash(getAttr("outputHash"), ht);
-fetch(hashedMirror + printHashType(*h.type) + "/" + h.to_string(Base16, false));
+auto & h = output.hash->hash;
+fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
 return;
 } catch (Error & e) {
 debug(e.what());

View file

@@ -4,7 +4,7 @@
 namespace nix {
 std::string FixedOutputHash::printMethodAlgo() const {
-return makeFileIngestionPrefix(method) + printHashType(*hash.type);
+return makeFileIngestionPrefix(method) + printHashType(hash.type);
 }
 std::string makeFileIngestionPrefix(const FileIngestionMethod m) {

View file

@@ -314,7 +314,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 logger->startWork();
 auto hash = store->queryPathInfo(path)->narHash;
 logger->stopWork();
-to << hash.to_string(Base16, false);
+to << hash->to_string(Base16, false);
 break;
 }
@@ -646,7 +646,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
 to << 1;
 to << (info->deriver ? store->printStorePath(*info->deriver) : "")
-<< info->narHash.to_string(Base16, false);
+<< info->narHash->to_string(Base16, false);
 writeStorePaths(*store, to, info->references);
 to << info->registrationTime << info->narSize;
 if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {

View file

@@ -404,7 +404,7 @@ static DerivationOutput readDerivationOutput(Source & in, const Store & store)
 {
 auto path = store.parseStorePath(readString(in));
 auto hashAlgo = readString(in);
-const auto hash = readString(in);
+auto hash = readString(in);
 std::optional<FixedOutputHash> fsh;
 if (hashAlgo != "") {
@@ -413,7 +413,7 @@ static DerivationOutput readDerivationOutput(Source & in, const Store & store)
 method = FileIngestionMethod::Recursive;
 hashAlgo = string(hashAlgo, 2);
 }
-const HashType hashType = parseHashType(hashAlgo);
+auto hashType = parseHashType(hashAlgo);
 fsh = FixedOutputHash {
 .method = std::move(method),
 .hash = Hash(hash, hashType),
@@ -463,11 +463,16 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv)
 void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
 {
 out << drv.outputs.size();
-for (auto & i : drv.outputs)
-out << i.first
-<< store.printStorePath(i.second.path)
-<< i.second.hash->printMethodAlgo()
-<< i.second.hash->hash.to_string(Base16, false);
+for (auto & i : drv.outputs) {
+out << i.first
+<< store.printStorePath(i.second.path);
+if (i.second.hash) {
+out << i.second.hash->printMethodAlgo()
+<< i.second.hash->hash.to_string(Base16, false);
+} else {
+out << "" << "";
+}
+}
 writeStorePaths(store, out, drv.inputSrcs);
 out << drv.platform << drv.builder << drv.args;
 out << drv.env.size();
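The write side above now mirrors the read side earlier in this file: an output without a fixed-output hash is serialized as two empty strings, which readDerivationOutput turns back into an empty hashAlgo and hence an unset FixedOutputHash. A condensed sketch of just that branch, assuming "derivations.hh", "serialise.hh" and "hash.hh" from this tree (writeOutputHash is a hypothetical helper):

#include "derivations.hh"
#include "serialise.hh"
#include "hash.hh"

using namespace nix;

// Hypothetical condensation of the loop body shown in the hunk above.
static void writeOutputHash(Sink & out, const DerivationOutput & output)
{
    if (output.hash)
        // Fixed-output: method/algo prefix plus the base-16 hash.
        out << output.hash->printMethodAlgo()
            << output.hash->hash.to_string(Base16, false);
    else
        // No fixed-output hash: two empty fields, which the read side
        // (hashAlgo == "") maps back to an unset hash.
        out << "" << "";
}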

View file

@@ -55,9 +55,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
 filesystem corruption from spreading to other machines.
 Don't complain if the stored hash is zero (unknown). */
 Hash hash = hashAndWriteSink.currentHash();
-if (hash != info->narHash && info->narHash != Hash(*info->narHash.type))
+if (hash != info->narHash && info->narHash != Hash(info->narHash->type))
 throw Error("hash of path '%s' has changed from '%s' to '%s'!",
-printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
+printStorePath(path), info->narHash->to_string(Base32, true), hash.to_string(Base32, true));
 hashAndWriteSink
 << exportMagic

View file

@@ -113,7 +113,7 @@ struct LegacySSHStore : public Store
 if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
 auto s = readString(conn->from);
-info->narHash = s.empty() ? Hash() : Hash(s);
+info->narHash = s.empty() ? std::optional<Hash>{} : Hash{s};
 info->ca = parseContentAddressOpt(readString(conn->from));
 info->sigs = readStrings<StringSet>(conn->from);
 }
@@ -139,7 +139,7 @@ struct LegacySSHStore : public Store
 << cmdAddToStoreNar
 << printStorePath(info.path)
 << (info.deriver ? printStorePath(*info.deriver) : "")
-<< info.narHash.to_string(Base16, false);
+<< info.narHash->to_string(Base16, false);
 writeStorePaths(*this, conn->to, info.references);
 conn->to
 << info.registrationTime

View file

@@ -586,7 +586,7 @@ uint64_t LocalStore::addValidPath(State & state,
 state.stmtRegisterValidPath.use()
 (printStorePath(info.path))
-(info.narHash.to_string(Base16, true))
+(info.narHash->to_string(Base16, true))
 (info.registrationTime == 0 ? time(0) : info.registrationTime)
 (info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
 (info.narSize, info.narSize != 0)
@@ -686,7 +686,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
 {
 state.stmtUpdatePathInfo.use()
 (info.narSize, info.narSize != 0)
-(info.narHash.to_string(Base16, true))
+(info.narHash->to_string(Base16, true))
 (info.ultimate ? 1 : 0, info.ultimate)
 (concatStringsSep(" ", info.sigs), !info.sigs.empty())
 (renderContentAddress(info.ca), (bool) info.ca)
@@ -897,7 +897,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
 StorePathSet paths;
 for (auto & i : infos) {
-assert(i.narHash.type == htSHA256);
+assert(i.narHash && i.narHash->type == htSHA256);
 if (isValidPath_(*state, i.path))
 updatePathInfo(*state, i);
 else
@@ -1010,7 +1010,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 if (hashResult.first != info.narHash)
 throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s",
-printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
+printStorePath(info.path), info.narHash->to_string(Base32, true), hashResult.first.to_string(Base32, true));
 if (hashResult.second != info.narSize)
 throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s",
@@ -1067,12 +1067,12 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
 the path in the database. We may just have computed it
 above (if called with recursive == true and hashAlgo ==
 sha256); otherwise, compute it here. */
-HashResult hash;
-if (method == FileIngestionMethod::Recursive) {
-hash.first = hashAlgo == htSHA256 ? h : hashString(htSHA256, dump);
-hash.second = dump.size();
-} else
-hash = hashPath(htSHA256, realPath);
+HashResult hash = method == FileIngestionMethod::Recursive
+? HashResult {
+hashAlgo == htSHA256 ? h : hashString(htSHA256, dump),
+dump.size(),
+}
+: hashPath(htSHA256, realPath);
 optimisePath(realPath); // FIXME: combine with hashPath()
@@ -1255,9 +1255,9 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
 std::unique_ptr<AbstractHashSink> hashSink;
 if (!info->ca || !info->references.count(info->path))
-hashSink = std::make_unique<HashSink>(*info->narHash.type);
+hashSink = std::make_unique<HashSink>(info->narHash->type);
 else
-hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
+hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));
 dumpPath(Store::toRealPath(i), *hashSink);
 auto current = hashSink->finish();
@@ -1266,7 +1266,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
 logError({
 .name = "Invalid hash - path modified",
 .hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
-printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
+printStorePath(i), info->narHash->to_string(Base32, true), current.first.to_string(Base32, true))
 });
 if (repair) repairPath(i); else errors = true;
 } else {

View file

@@ -230,9 +230,9 @@ public:
 (std::string(info->path.name()))
 (narInfo ? narInfo->url : "", narInfo != 0)
 (narInfo ? narInfo->compression : "", narInfo != 0)
-(narInfo && narInfo->fileHash ? narInfo->fileHash.to_string(Base32, true) : "", narInfo && narInfo->fileHash)
+(narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash)
 (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
-(info->narHash.to_string(Base32, true))
+(info->narHash->to_string(Base32, true))
 (info->narSize)
 (concatStringsSep(" ", info->shortRefs()))
 (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)

View file

@@ -7,15 +7,14 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
 : ValidPathInfo(StorePath(StorePath::dummy)) // FIXME: hack
 {
 auto corrupt = [&]() {
-throw Error("NAR info file '%1%' is corrupt", whence);
+return Error("NAR info file '%1%' is corrupt", whence);
 };
 auto parseHashField = [&](const string & s) {
 try {
 return Hash(s);
 } catch (BadHash &) {
-corrupt();
-return Hash(); // never reached
+throw corrupt();
 }
 };
@@ -25,12 +24,12 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
 while (pos < s.size()) {
 size_t colon = s.find(':', pos);
-if (colon == std::string::npos) corrupt();
+if (colon == std::string::npos) throw corrupt();
 std::string name(s, pos, colon - pos);
 size_t eol = s.find('\n', colon + 2);
-if (eol == std::string::npos) corrupt();
+if (eol == std::string::npos) throw corrupt();
 std::string value(s, colon + 2, eol - colon - 2);
@@ -45,16 +44,16 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
 else if (name == "FileHash")
 fileHash = parseHashField(value);
 else if (name == "FileSize") {
-if (!string2Int(value, fileSize)) corrupt();
+if (!string2Int(value, fileSize)) throw corrupt();
 }
 else if (name == "NarHash")
 narHash = parseHashField(value);
 else if (name == "NarSize") {
-if (!string2Int(value, narSize)) corrupt();
+if (!string2Int(value, narSize)) throw corrupt();
 }
 else if (name == "References") {
 auto refs = tokenizeString<Strings>(value, " ");
-if (!references.empty()) corrupt();
+if (!references.empty()) throw corrupt();
 for (auto & r : refs)
 references.insert(StorePath(r));
 }
@@ -67,7 +66,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
 else if (name == "Sig")
 sigs.insert(value);
 else if (name == "CA") {
-if (ca) corrupt();
+if (!value.empty()) throw corrupt();
 // FIXME: allow blank ca or require skipping field?
 ca = parseContentAddressOpt(value);
 }
@@ -77,7 +76,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
 if (compression == "") compression = "bzip2";
-if (!havePath || url.empty() || narSize == 0 || !narHash) corrupt();
+if (!havePath || url.empty() || narSize == 0 || !narHash) throw corrupt();
 }
 std::string NarInfo::to_string(const Store & store) const
@@ -87,11 +86,11 @@ std::string NarInfo::to_string(const Store & store) const
 res += "URL: " + url + "\n";
 assert(compression != "");
 res += "Compression: " + compression + "\n";
-assert(fileHash.type == htSHA256);
-res += "FileHash: " + fileHash.to_string(Base32, true) + "\n";
+assert(fileHash && fileHash->type == htSHA256);
+res += "FileHash: " + fileHash->to_string(Base32, true) + "\n";
 res += "FileSize: " + std::to_string(fileSize) + "\n";
-assert(narHash.type == htSHA256);
-res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
+assert(narHash && narHash->type == htSHA256);
+res += "NarHash: " + narHash->to_string(Base32, true) + "\n";
 res += "NarSize: " + std::to_string(narSize) + "\n";
 res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";

View file

@@ -10,7 +10,7 @@ struct NarInfo : ValidPathInfo
 {
 std::string url;
 std::string compression;
-Hash fileHash;
+std::optional<Hash> fileHash;
 uint64_t fileSize = 0;
 std::string system;

View file

@@ -79,8 +79,8 @@ void RefScanSink::operator () (const unsigned char * data, size_t len)
 }
-PathSet scanForReferences(const string & path,
-const PathSet & refs, HashResult & hash)
+std::pair<PathSet, HashResult> scanForReferences(const string & path,
+const PathSet & refs)
 {
 RefScanSink sink;
 std::map<string, Path> backMap;
@@ -112,9 +112,9 @@ PathSet scanForReferences(const string & path,
 found.insert(j->second);
 }
-hash = sink.hashSink.finish();
-return found;
+auto hash = sink.hashSink.finish();
+return std::pair<PathSet, HashResult>(found, hash);
 }

View file

@@ -5,8 +5,7 @@
 namespace nix {
-PathSet scanForReferences(const Path & path, const PathSet & refs,
-HashResult & hash);
+std::pair<PathSet, HashResult> scanForReferences(const Path & path, const PathSet & refs);
 struct RewritingSink : Sink
 {
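With the HashResult out-parameter gone, callers get the found references and the NAR hash back as a pair, as the build.cc hunk above already shows. A minimal sketch of the new call shape, assuming "references.hh" from this tree (the function and variable names here are illustrative):

#include "references.hh"

using namespace nix;

void scanExample(const Path & actualPath, const PathSet & candidates)
{
    // scanForReferences now returns std::pair<PathSet, HashResult>
    // instead of filling a HashResult passed by reference.
    auto scanned = scanForReferences(actualPath, candidates);
    PathSet found = scanned.first;
    HashResult narHash = scanned.second;
    (void) found; (void) narHash;
}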

View file

@@ -462,7 +462,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
 conn->to << wopAddToStoreNar
 << printStorePath(info.path)
 << (info.deriver ? printStorePath(*info.deriver) : "")
-<< info.narHash.to_string(Base16, false);
+<< info.narHash->to_string(Base16, false);
 writeStorePaths(*this, conn->to, info.references);
 conn->to << info.registrationTime << info.narSize
 << info.ultimate << info.sigs << renderContentAddress(info.ca)

View file

@@ -430,7 +430,7 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
 auto info = queryPathInfo(i);
 if (showHash) {
-s += info->narHash.to_string(Base16, false) + "\n";
+s += info->narHash->to_string(Base16, false) + "\n";
 s += (format("%1%\n") % info->narSize).str();
 }
@@ -462,7 +462,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
 auto info = queryPathInfo(storePath);
 jsonPath
-.attr("narHash", info->narHash.to_string(hashBase, true))
+.attr("narHash", info->narHash->to_string(hashBase, true))
 .attr("narSize", info->narSize);
 {
@@ -505,7 +505,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
 if (!narInfo->url.empty())
 jsonPath.attr("url", narInfo->url);
 if (narInfo->fileHash)
-jsonPath.attr("downloadHash", narInfo->fileHash.to_string(Base32, true));
+jsonPath.attr("downloadHash", narInfo->fileHash->to_string(Base32, true));
 if (narInfo->fileSize)
 jsonPath.attr("downloadSize", narInfo->fileSize);
 if (showClosureSize)
@@ -746,7 +746,7 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
 store.printStorePath(path));
 return
 "1;" + store.printStorePath(path) + ";"
-+ narHash.to_string(Base32, true) + ";"
++ narHash->to_string(Base32, true) + ";"
 + std::to_string(narSize) + ";"
 + concatStringsSep(",", store.printStorePathSet(references));
 }

View file

@@ -115,7 +115,8 @@ struct ValidPathInfo
 {
 StorePath path;
 std::optional<StorePath> deriver;
-Hash narHash;
+// TODO document this
+std::optional<Hash> narHash;
 StorePathSet references;
 time_t registrationTime = 0;
 uint64_t narSize = 0; // 0 = unknown

View file

@@ -1,6 +1,7 @@
 #pragma once
 #include <boost/format.hpp>
+#include <boost/algorithm/string/replace.hpp>
 #include <string>
 #include "ansicolor.hh"
@@ -103,7 +104,9 @@ class hintformat
 public:
 hintformat(const string &format) :fmt(format)
 {
-fmt.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit);
+fmt.exceptions(boost::io::all_error_bits ^
+boost::io::too_many_args_bit ^
+boost::io::too_few_args_bit);
 }
 hintformat(const hintformat &hf)
@@ -117,6 +120,13 @@ public:
 return *this;
 }
+template<class T>
+hintformat& operator%(const normaltxt<T> &value)
+{
+fmt % value.value;
+return *this;
+}
 std::string str() const
 {
 return fmt.str();
@@ -136,4 +146,9 @@ inline hintformat hintfmt(const std::string & fs, const Args & ... args)
 return f;
 }
+inline hintformat hintfmt(std::string plain_string)
+{
+// we won't be receiving any args in this case, so just print the original string
+return hintfmt("%s", normaltxt(plain_string));
+}
 }
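Taken together, masking too_few_args_bit, the normaltxt overload of operator%, and the new single-argument hintfmt make it safe to hand a preformatted string containing '%' straight to hintfmt, which the new unit tests further down exercise. A small usage sketch, assuming "error.hh" from this tree:

#include "error.hh"
#include <iostream>

using namespace nix;

int main()
{
    // A plain string with a stray "%s" is passed through verbatim
    // instead of tripping boost::format's argument checks.
    std::cout << hintfmt("this is 100%s correct!").str() << "\n";

    // Supplying too few arguments no longer throws either; the
    // unfilled placeholder is simply left blank.
    std::cout << hintfmt("only one arg %1% %2%", "fulfilled").str() << "\n";
    return 0;
}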

View file

@@ -16,16 +16,19 @@
 namespace nix {
+static size_t regularHashSize(HashType type) {
+switch (type) {
+case htMD5: return md5HashSize;
+case htSHA1: return sha1HashSize;
+case htSHA256: return sha256HashSize;
+case htSHA512: return sha512HashSize;
+}
+abort();
+}
 void Hash::init()
 {
-if (!type) abort();
-switch (*type) {
-case htMD5: hashSize = md5HashSize; break;
-case htSHA1: hashSize = sha1HashSize; break;
-case htSHA256: hashSize = sha256HashSize; break;
-case htSHA512: hashSize = sha512HashSize; break;
-}
+hashSize = regularHashSize(type);
 assert(hashSize <= maxHashSize);
 memset(hash, 0, maxHashSize);
 }
@@ -101,22 +104,16 @@ static string printHash32(const Hash & hash)
 string printHash16or32(const Hash & hash)
 {
-assert(hash.type);
 return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false);
 }
-HashType assertInitHashType(const Hash & h) {
-if (h.type)
-return *h.type;
-else
-abort();
-}
 std::string Hash::to_string(Base base, bool includeType) const
 {
 std::string s;
 if (base == SRI || includeType) {
-s += printHashType(assertInitHashType(*this));
+s += printHashType(type);
 s += base == SRI ? '-' : ':';
 }
 switch (base) {
@@ -137,60 +134,66 @@ std::string Hash::to_string(Base base, bool includeType) const
 Hash::Hash(std::string_view s, HashType type) : Hash(s, std::optional { type }) { }
 Hash::Hash(std::string_view s) : Hash(s, std::optional<HashType>{}) { }
-Hash::Hash(std::string_view s, std::optional<HashType> type)
-: type(type)
+Hash::Hash(std::string_view original, std::optional<HashType> optType)
 {
+auto rest = original;
 size_t pos = 0;
 bool isSRI = false;
-auto sep = s.find(':');
-if (sep == string::npos) {
-sep = s.find('-');
-if (sep != string::npos) {
-isSRI = true;
-} else if (! type)
-throw BadHash("hash '%s' does not include a type", s);
+// Parse the hash type before the separator, if there was one.
+std::optional<HashType> optParsedType;
+{
+auto sep = rest.find(':');
+if (sep == std::string_view::npos) {
+sep = rest.find('-');
+if (sep != std::string_view::npos)
+isSRI = true;
+}
+if (sep != std::string_view::npos) {
+auto hashRaw = rest.substr(0, sep);
+optParsedType = parseHashType(hashRaw);
+rest = rest.substr(sep + 1);
+}
 }
-if (sep != string::npos) {
-string hts = string(s, 0, sep);
-this->type = parseHashType(hts);
-if (!this->type)
-throw BadHash("unknown hash type '%s'", hts);
-if (type && type != this->type)
-throw BadHash("hash '%s' should have type '%s'", s, printHashType(*type));
-pos = sep + 1;
+// Either the string or the caller must provide the type; if both do, they
+// must agree.
+if (!optParsedType && !optType) {
+throw BadHash("hash '%s' does not include a type, nor is the type otherwise known from context.", rest);
+} else {
+this->type = optParsedType ? *optParsedType : *optType;
+if (optParsedType && optType && *optParsedType != *optType)
+throw BadHash("hash '%s' should have type '%s'", original, printHashType(*optType));
 }
 init();
-size_t size = s.size() - pos;
-if (!isSRI && size == base16Len()) {
+if (!isSRI && rest.size() == base16Len()) {
 auto parseHexDigit = [&](char c) {
 if (c >= '0' && c <= '9') return c - '0';
 if (c >= 'A' && c <= 'F') return c - 'A' + 10;
 if (c >= 'a' && c <= 'f') return c - 'a' + 10;
-throw BadHash("invalid base-16 hash '%s'", s);
+throw BadHash("invalid base-16 hash '%s'", original);
 };
 for (unsigned int i = 0; i < hashSize; i++) {
 hash[i] =
-parseHexDigit(s[pos + i * 2]) << 4
-| parseHexDigit(s[pos + i * 2 + 1]);
+parseHexDigit(rest[pos + i * 2]) << 4
+| parseHexDigit(rest[pos + i * 2 + 1]);
 }
 }
-else if (!isSRI && size == base32Len()) {
-for (unsigned int n = 0; n < size; ++n) {
-char c = s[pos + size - n - 1];
+else if (!isSRI && rest.size() == base32Len()) {
+for (unsigned int n = 0; n < rest.size(); ++n) {
+char c = rest[rest.size() - n - 1];
 unsigned char digit;
 for (digit = 0; digit < base32Chars.size(); ++digit) /* !!! slow */
 if (base32Chars[digit] == c) break;
 if (digit >= 32)
-throw BadHash("invalid base-32 hash '%s'", s);
+throw BadHash("invalid base-32 hash '%s'", original);
 unsigned int b = n * 5;
 unsigned int i = b / 8;
 unsigned int j = b % 8;
@@ -200,21 +203,21 @@ Hash::Hash(std::string_view s, std::optional<HashType> type)
 hash[i + 1] |= digit >> (8 - j);
 } else {
 if (digit >> (8 - j))
-throw BadHash("invalid base-32 hash '%s'", s);
+throw BadHash("invalid base-32 hash '%s'", original);
 }
 }
 }
-else if (isSRI || size == base64Len()) {
-auto d = base64Decode(s.substr(pos));
+else if (isSRI || rest.size() == base64Len()) {
+auto d = base64Decode(rest);
 if (d.size() != hashSize)
-throw BadHash("invalid %s hash '%s'", isSRI ? "SRI" : "base-64", s);
+throw BadHash("invalid %s hash '%s'", isSRI ? "SRI" : "base-64", original);
 assert(hashSize);
 memcpy(hash, d.data(), hashSize);
 }
 else
-throw BadHash("hash '%s' has wrong length for hash type '%s'", s, printHashType(*type));
+throw BadHash("hash '%s' has wrong length for hash type '%s'", rest, printHashType(this->type));
 }
 Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht)
@@ -267,7 +270,7 @@ static void finish(HashType ht, Ctx & ctx, unsigned char * hash)
 }
-Hash hashString(HashType ht, const string & s)
+Hash hashString(HashType ht, std::string_view s)
 {
 Ctx ctx;
 Hash hash(ht);
@@ -334,7 +337,7 @@ HashResult hashPath(
 Hash compressHash(const Hash & hash, unsigned int newSize)
 {
-Hash h;
+Hash h(hash.type);
 h.hashSize = newSize;
 for (unsigned int i = 0; i < hash.hashSize; ++i)
 h.hash[i % newSize] ^= hash.hash[i];
@@ -363,14 +366,15 @@ HashType parseHashType(std::string_view s)
 string printHashType(HashType ht)
 {
 switch (ht) {
-case htMD5: return "md5"; break;
-case htSHA1: return "sha1"; break;
-case htSHA256: return "sha256"; break;
-case htSHA512: return "sha512"; break;
+case htMD5: return "md5";
+case htSHA1: return "sha1";
+case htSHA256: return "sha256";
+case htSHA512: return "sha512";
+default:
+// illegal hash type enum value internally, as opposed to external input
+// which should be validated with nice error message.
+abort();
 }
-// illegal hash type enum value internally, as opposed to external input
-// which should be validated with nice error message.
-abort();
 }
 }
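The reworked constructor takes the hash type either from the string's prefix or from the caller, and rejects the cases where neither supplies it or the two disagree. A short sketch of those paths, assuming "hash.hh" from this tree:

#include "hash.hh"
#include <cassert>

using namespace nix;

int main()
{
    // Type supplied by the caller.
    Hash h1 = hashString(htSHA256, "hello");

    // Type recovered from the "sha256:..." prefix, so no explicit type needed.
    Hash h2(h1.to_string(Base32, true));
    assert(h1 == h2);

    // Neither a prefix nor a caller-supplied type: BadHash is thrown.
    try {
        Hash h3(h1.to_string(Base32, false));
    } catch (BadHash &) {
        // expected
    }
    return 0;
}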

View file

@@ -10,7 +10,7 @@ namespace nix {
 MakeError(BadHash, Error);
-enum HashType : char { htMD5, htSHA1, htSHA256, htSHA512 };
+enum HashType : char { htMD5 = 42, htSHA1, htSHA256, htSHA512 };
 const int md5HashSize = 16;
@@ -25,14 +25,11 @@ enum Base : int { Base64, Base32, Base16, SRI };
 struct Hash
 {
-static const unsigned int maxHashSize = 64;
-unsigned int hashSize = 0;
-unsigned char hash[maxHashSize] = {};
-std::optional<HashType> type = {};
-/* Create an unset hash object. */
-Hash() { };
+constexpr static size_t maxHashSize = 64;
+size_t hashSize = 0;
+uint8_t hash[maxHashSize] = {};
+HashType type;
 /* Create a zero-filled hash object. */
 Hash(HashType type) : type(type) { init(); };
@@ -105,7 +102,7 @@ Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht);
 string printHash16or32(const Hash & hash);
 /* Compute the hash of the given string. */
-Hash hashString(HashType ht, const string & s);
+Hash hashString(HashType ht, std::string_view s);
 /* Compute the hash of the given file. */
 Hash hashFile(HashType ht, const Path & path);

View file

@@ -1,6 +1,7 @@
 #include "logging.hh"
 #include "nixexpr.hh"
 #include "util.hh"
+#include <fstream>
 #include <gtest/gtest.h>
@@ -42,7 +43,7 @@ namespace nix {
 logger->logEI(ei);
 auto str = testing::internal::GetCapturedStderr();
-ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0minitial error\x1B[0m; subsequent error message.\n");
+ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\ninitial error; subsequent error message.\n");
 }
 }
@@ -60,8 +61,7 @@ namespace nix {
 logError(e.info());
 auto str = testing::internal::GetCapturedStderr();
-ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0mstatting file\x1B[0m: \x1B[33;1mBad file descriptor\x1B[0m\n");
+ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nstatting file: \x1B[33;1mBad file descriptor\x1B[0m\n");
 }
 }
@@ -69,9 +69,9 @@ namespace nix {
 testing::internal::CaptureStderr();
 logger->logEI({ .level = lvlInfo,
 .name = "Info name",
 .description = "Info description",
 });
 auto str = testing::internal::GetCapturedStderr();
 ASSERT_STREQ(str.c_str(), "\x1B[32;1minfo:\x1B[0m\x1B[34;1m --- Info name --- error-unit-test\x1B[0m\nInfo description\n");
@@ -85,7 +85,7 @@ namespace nix {
 logger->logEI({ .level = lvlTalkative,
 .name = "Talkative name",
 .description = "Talkative description",
 });
 auto str = testing::internal::GetCapturedStderr();
 ASSERT_STREQ(str.c_str(), "\x1B[32;1mtalk:\x1B[0m\x1B[34;1m --- Talkative name --- error-unit-test\x1B[0m\nTalkative description\n");
@@ -99,7 +99,7 @@ namespace nix {
 logger->logEI({ .level = lvlChatty,
 .name = "Chatty name",
 .description = "Talkative description",
 });
 auto str = testing::internal::GetCapturedStderr();
 ASSERT_STREQ(str.c_str(), "\x1B[32;1mchat:\x1B[0m\x1B[34;1m --- Chatty name --- error-unit-test\x1B[0m\nTalkative description\n");
@@ -113,7 +113,7 @@ namespace nix {
 logger->logEI({ .level = lvlDebug,
 .name = "Debug name",
 .description = "Debug description",
 });
 auto str = testing::internal::GetCapturedStderr();
 ASSERT_STREQ(str.c_str(), "\x1B[33;1mdebug:\x1B[0m\x1B[34;1m --- Debug name --- error-unit-test\x1B[0m\nDebug description\n");
@@ -127,7 +127,7 @@ namespace nix {
 logger->logEI({ .level = lvlVomit,
 .name = "Vomit name",
 .description = "Vomit description",
 });
 auto str = testing::internal::GetCapturedStderr();
 ASSERT_STREQ(str.c_str(), "\x1B[32;1mvomit:\x1B[0m\x1B[34;1m --- Vomit name --- error-unit-test\x1B[0m\nVomit description\n");
@@ -144,7 +144,7 @@ namespace nix {
 logError({
 .name = "name",
 .description = "error description",
 });
 auto str = testing::internal::GetCapturedStderr();
 ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n");
@@ -160,13 +160,13 @@ namespace nix {
 .name = "error name",
 .description = "error with code lines",
 .hint = hintfmt("this hint has %1% templated %2%!!",
 "yellow",
 "values"),
 .nixCode = NixCode {
 .errPos = Pos(problem_file, 40, 13),
 .prevLineOfCode = "previous line of code",
 .errLineOfCode = "this is the problem line of code",
 .nextLineOfCode = "next line of code",
 }});
@@ -183,10 +183,10 @@ namespace nix {
 .name = "error name",
 .description = "error without any code lines.",
 .hint = hintfmt("this hint has %1% templated %2%!!",
 "yellow",
 "values"),
 .nixCode = NixCode {
 .errPos = Pos(problem_file, 40, 13)
 }});
 auto str = testing::internal::GetCapturedStderr();
@@ -202,7 +202,7 @@ namespace nix {
 .name = "error name",
 .hint = hintfmt("hint %1%", "only"),
 .nixCode = NixCode {
 .errPos = Pos(problem_file, 40, 13)
 }});
 auto str = testing::internal::GetCapturedStderr();
@@ -218,10 +218,10 @@ namespace nix {
 testing::internal::CaptureStderr();
 logWarning({
 .name = "name",
 .description = "error description",
 .hint = hintfmt("there was a %1%", "warning"),
 });
 auto str = testing::internal::GetCapturedStderr();
 ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n");
@@ -238,13 +238,13 @@ namespace nix {
 .name = "warning name",
 .description = "warning description",
 .hint = hintfmt("this hint has %1% templated %2%!!",
 "yellow",
 "values"),
 .nixCode = NixCode {
 .errPos = Pos(problem_file, 40, 13),
 .prevLineOfCode = std::nullopt,
 .errLineOfCode = "this is the problem line of code",
 .nextLineOfCode = std::nullopt
 }});
@@ -252,4 +252,41 @@ namespace nix {
 ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nwarning description\n\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
 }
+
+/* ----------------------------------------------------------------------------
+ * hintfmt
+ * --------------------------------------------------------------------------*/
+
+TEST(hintfmt, percentStringWithoutArgs) {
+const char *teststr = "this is 100%s correct!";
+ASSERT_STREQ(
+hintfmt(teststr).str().c_str(),
+teststr);
+}
+
+TEST(hintfmt, fmtToHintfmt) {
+ASSERT_STREQ(
+hintfmt(fmt("the color of this this text is %1%", "not yellow")).str().c_str(),
+"the color of this this text is not yellow");
+}
+
+TEST(hintfmt, tooFewArguments) {
+ASSERT_STREQ(
+hintfmt("only one arg %1% %2%", "fulfilled").str().c_str(),
+"only one arg " ANSI_YELLOW "fulfilled" ANSI_NORMAL " ");
+}
+
+TEST(hintfmt, tooManyArguments) {
+ASSERT_STREQ(
+hintfmt("what about this %1% %2%", "%3%", "one", "two").str().c_str(),
+"what about this " ANSI_YELLOW "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL);
+}
+
 }

View file

@@ -153,7 +153,7 @@ static int _main(int argc, char * * argv)
 /* If an expected hash is given, the file may already exist in
 the store. */
-Hash hash, expectedHash(ht);
+Hash hash(ht), expectedHash(ht);
 std::optional<StorePath> storePath;
 if (args.size() == 2) {
 expectedHash = Hash(args[1], ht);

View file

@ -372,8 +372,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) { for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
auto info = store->queryPathInfo(j); auto info = store->queryPathInfo(j);
if (query == qHash) { if (query == qHash) {
assert(info->narHash.type == htSHA256); assert(info->narHash && info->narHash->type == htSHA256);
cout << fmt("%s\n", info->narHash.to_string(Base32, true)); cout << fmt("%s\n", info->narHash->to_string(Base32, true));
} else if (query == qSize) } else if (query == qSize)
cout << fmt("%d\n", info->narSize); cout << fmt("%d\n", info->narSize);
} }
@ -725,7 +725,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
auto path = store->followLinksToStorePath(i); auto path = store->followLinksToStorePath(i);
printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path)); printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path));
auto info = store->queryPathInfo(path); auto info = store->queryPathInfo(path);
HashSink sink(*info->narHash.type); HashSink sink(info->narHash->type);
store->narFromPath(path, sink); store->narFromPath(path, sink);
auto current = sink.finish(); auto current = sink.finish();
if (current.first != info->narHash) { if (current.first != info->narHash) {
@ -734,7 +734,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
.hint = hintfmt( .hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'", "path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(path), store->printStorePath(path),
info->narHash.to_string(Base32, true), info->narHash->to_string(Base32, true),
current.first.to_string(Base32, true)) current.first.to_string(Base32, true))
}); });
status = 1; status = 1;
@ -864,7 +864,9 @@ static void opServe(Strings opFlags, Strings opArgs)
out << info->narSize // downloadSize out << info->narSize // downloadSize
<< info->narSize; << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 4) if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
out << (info->narHash ? info->narHash.to_string(Base32, true) : "") << renderContentAddress(info->ca) << info->sigs; out << (info->narHash ? info->narHash->to_string(Base32, true) : "")
<< renderContentAddress(info->ca)
<< info->sigs;
} catch (InvalidPath &) { } catch (InvalidPath &) {
} }
} }
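
All of the nix-store.cc hunks above follow from narHash becoming optional on ValidPathInfo: the hash may simply be unknown, so every reader has to test it before dereferencing. A condensed sketch of the pattern, assembled from the opQuery and opServe hunks; renderNarHash is a hypothetical helper used only for illustration:

    // Sketch: narHash is now accessed through an optional-like wrapper.
    std::string renderNarHash(const ValidPathInfo & info)
    {
        if (info.narHash)
            return info.narHash->to_string(Base32, true);  // known hash, Base32 text
        return "";                                         // unknown: opServe sends ""
    }
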

View file

@ -50,7 +50,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.ca = std::optional { FixedOutputHash { info.ca = std::optional { FixedOutputHash {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::Recursive,
.hash = info.narHash, .hash = *info.narHash,
} }; } };
if (!dryRun) { if (!dryRun) {

View file

@ -139,7 +139,7 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
.path = shellOutPath, .path = shellOutPath,
.hash = FixedOutputHash { .hash = FixedOutputHash {
.method = FileIngestionMethod::Flat, .method = FileIngestionMethod::Flat,
.hash = Hash { }, .hash = Hash { htSHA256 },
}, },
}); });
drv.env["out"] = store->printStorePath(shellOutPath); drv.env["out"] = store->printStorePath(shellOutPath);

View file

@ -84,7 +84,7 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.ca = FixedOutputHash { info.ca = FixedOutputHash {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::Recursive,
.hash = info.narHash, .hash = *info.narHash,
}; };
if (!json) if (!json)

View file

@ -88,15 +88,15 @@ struct CmdVerify : StorePathsCommand
std::unique_ptr<AbstractHashSink> hashSink; std::unique_ptr<AbstractHashSink> hashSink;
if (!info->ca) if (!info->ca)
hashSink = std::make_unique<HashSink>(*info->narHash.type); hashSink = std::make_unique<HashSink>(info->narHash->type);
else else
hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart())); hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));
store->narFromPath(info->path, *hashSink); store->narFromPath(info->path, *hashSink);
auto hash = hashSink->finish(); auto hash = hashSink->finish();
if (hash.first != info->narHash) { if (hash.first != *info->narHash) {
corrupted++; corrupted++;
act2.result(resCorruptedPath, store->printStorePath(info->path)); act2.result(resCorruptedPath, store->printStorePath(info->path));
logError({ logError({
@ -104,7 +104,7 @@ struct CmdVerify : StorePathsCommand
.hint = hintfmt( .hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'", "path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(info->path), store->printStorePath(info->path),
info->narHash.to_string(Base32, true), info->narHash->to_string(Base32, true),
hash.first.to_string(Base32, true)) hash.first.to_string(Base32, true))
}); });
} }
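
Put together, the two CmdVerify hunks above give roughly this flow: use a plain HashSink when the path has no content address, or a HashModuloSink (which masks self-references) when it does; re-hash the NAR; and compare against the stored hash. A condensed sketch assembled from those hunks, not the full method:

    // Sketch of the verify loop after this change (condensed from the hunks above).
    std::unique_ptr<AbstractHashSink> hashSink;
    if (!info->ca)
        hashSink = std::make_unique<HashSink>(info->narHash->type);
    else
        hashSink = std::make_unique<HashModuloSink>(
            info->narHash->type, std::string(info->path.hashPart()));

    store->narFromPath(info->path, *hashSink);   // stream the NAR through the sink
    auto hash = hashSink->finish();              // (hash, size) pair
    if (hash.first != *info->narHash) {
        corrupted++;                             // contents no longer match the DB
        // ...report the mismatch with hintfmt, as in the hunk above...
    }
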

View file

@ -1,23 +1,39 @@
{ busybox }:
with import ./config.nix; with import ./config.nix;
let let
mkDerivation = args:
derivation ({
inherit system;
builder = busybox;
args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")];
} // removeAttrs args ["builder" "meta"])
// { meta = args.meta or {}; };
input1 = mkDerivation { input1 = mkDerivation {
name = "build-hook-input-1"; shell = busybox;
buildCommand = "mkdir $out; echo FOO > $out/foo"; name = "build-remote-input-1";
buildCommand = "echo FOO > $out";
requiredSystemFeatures = ["foo"]; requiredSystemFeatures = ["foo"];
}; };
input2 = mkDerivation { input2 = mkDerivation {
name = "build-hook-input-2"; shell = busybox;
buildCommand = "mkdir $out; echo BAR > $out/bar"; name = "build-remote-input-2";
buildCommand = "echo BAR > $out";
}; };
in in
mkDerivation { mkDerivation {
name = "build-hook"; shell = busybox;
builder = ./dependencies.builder0.sh; name = "build-remote";
input1 = " " + input1 + "/."; buildCommand =
input2 = " ${input2}/."; ''
read x < ${input1}
read y < ${input2}
echo $x$y > $out
'';
} }

View file

@ -3,22 +3,29 @@ source common.sh
clearStore clearStore
if ! canUseSandbox; then exit; fi if ! canUseSandbox; then exit; fi
if [[ ! $SHELL =~ /nix/store ]]; then exit; fi if ! [[ $busybox =~ busybox ]]; then exit; fi
chmod -R u+w $TEST_ROOT/store0 || true chmod -R u+w $TEST_ROOT/machine0 || true
chmod -R u+w $TEST_ROOT/store1 || true chmod -R u+w $TEST_ROOT/machine1 || true
rm -rf $TEST_ROOT/store0 $TEST_ROOT/store1 chmod -R u+w $TEST_ROOT/machine2 || true
rm -rf $TEST_ROOT/machine0 $TEST_ROOT/machine1 $TEST_ROOT/machine2
rm -f $TEST_ROOT/result
nix build -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \ unset NIX_STORE_DIR
--sandbox-paths /nix/store --sandbox-build-dir /build-tmp \ unset NIX_STATE_DIR
--builders "$TEST_ROOT/store0; $TEST_ROOT/store1 - - 1 1 foo" \
# Note: ssh://localhost bypasses ssh, directly invoking nix-store as a
# child process. This allows us to test LegacySSHStore::buildDerivation().
nix build -L -v -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \
--arg busybox $busybox \
--store $TEST_ROOT/machine0 \
--builders "ssh://localhost?remote-store=$TEST_ROOT/machine1; $TEST_ROOT/machine2 - - 1 1 foo" \
--system-features foo --system-features foo
outPath=$TEST_ROOT/result outPath=$(readlink -f $TEST_ROOT/result)
cat $outPath/foobar | grep FOOBAR cat $TEST_ROOT/machine0/$outPath | grep FOOBAR
# Ensure that input1 was built on store1 due to the required feature. # Ensure that input1 was built on store2 due to the required feature.
p=$(readlink -f $outPath/input-2) (! nix path-info --store $TEST_ROOT/machine1 --all | grep builder-build-remote-input-1.sh)
(! nix path-info --store $TEST_ROOT/store0 --all | grep builder-build-hook-input-1.sh) nix path-info --store $TEST_ROOT/machine2 --all | grep builder-build-remote-input-1.sh
nix path-info --store $TEST_ROOT/store1 --all | grep builder-build-hook-input-1.sh

View file

@ -35,6 +35,7 @@ export xmllint="@xmllint@"
export SHELL="@bash@" export SHELL="@bash@"
export PAGER=cat export PAGER=cat
export HAVE_SODIUM="@HAVE_SODIUM@" export HAVE_SODIUM="@HAVE_SODIUM@"
export busybox="@sandbox_shell@"
export version=@PACKAGE_VERSION@ export version=@PACKAGE_VERSION@
export system=@system@ export system=@system@

View file

@ -2,6 +2,8 @@ source common.sh
clearStore clearStore
rm -f $TEST_ROOT/result
export REMOTE_STORE=$TEST_ROOT/remote_store export REMOTE_STORE=$TEST_ROOT/remote_store
# Build the dependencies and push them to the remote store # Build the dependencies and push them to the remote store

View file

@ -5,6 +5,8 @@ if [[ $(uname) != Linux ]]; then exit; fi
clearStore clearStore
rm -f $TEST_ROOT/result
export unreachable=$(nix add-to-store ./recursive.sh) export unreachable=$(nix add-to-store ./recursive.sh)
nix --experimental-features 'nix-command recursive-nix' build -o $TEST_ROOT/result -L '( nix --experimental-features 'nix-command recursive-nix' build -o $TEST_ROOT/result -L '(

View file

@ -2,6 +2,8 @@ source common.sh
clearStore clearStore
rm -f $TEST_ROOT/result
nix-build structured-attrs.nix -A all -o $TEST_ROOT/result nix-build structured-attrs.nix -A all -o $TEST_ROOT/result
[[ $(cat $TEST_ROOT/result/foo) = bar ]] [[ $(cat $TEST_ROOT/result/foo) = bar ]]