Mirror of https://github.com/privatevoid-net/nix-super.git (synced 2024-11-10 16:26:18 +02:00)

Merge remote-tracking branch 'upstream/master' into hash-always-has-type

Commit 98e5d1af03: 29 changed files with 329 additions and 176 deletions

@@ -1,7 +1,7 @@
 #include "get-drvs.hh"
 #include "util.hh"
 #include "eval-inline.hh"
-#include "derivations.hh"
+#include "store-api.hh"
 
 #include <cstring>
 #include <regex>

@@ -776,7 +776,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
         if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath);
         drv.outputs.insert_or_assign("out", DerivationOutput {
             .path = std::move(outPath),
-            .hash = DerivationOutputHash {
+            .hash = FixedOutputHash {
                 .method = ingestionMethod,
                 .hash = std::move(h),
             },

@@ -795,7 +795,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
             drv.outputs.insert_or_assign(i,
                 DerivationOutput {
                     .path = StorePath::dummy,
-                    .hash = std::optional<DerivationOutputHash> {},
+                    .hash = std::optional<FixedOutputHash> {},
                 });
         }
 
@@ -807,7 +807,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
             drv.outputs.insert_or_assign(i,
                 DerivationOutput {
                     .path = std::move(outPath),
-                    .hash = std::optional<DerivationOutputHash>(),
+                    .hash = std::optional<FixedOutputHash>(),
                 });
         }
     }

@@ -1,6 +1,6 @@
 #include "primops.hh"
 #include "eval-inline.hh"
-#include "derivations.hh"
+#include "store-api.hh"
 
 namespace nix {
 
@@ -70,7 +70,10 @@ DownloadFileResult downloadFile(
         ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
         info.narHash = hashString(htSHA256, *sink.s);
         info.narSize = sink.s->size();
-        info.ca = makeFixedOutputCA(FileIngestionMethod::Flat, hash);
+        info.ca = FixedOutputHash {
+            .method = FileIngestionMethod::Flat,
+            .hash = hash,
+        };
         auto source = StringSource { *sink.s };
         store->addToStore(info, source, NoRepair, NoCheckSigs);
         storePath = std::move(info.path);
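
The pattern above recurs throughout this commit: call sites stop assembling the `ca` string by hand and instead store a structured `FixedOutputHash`, leaving serialisation to `renderContentAddress`. A minimal sketch of that equivalence, not taken from the diff (the hash value is a placeholder):

    // Sketch only: the structured value renders to the same "fixed:..." text
    // that makeFixedOutputCA used to produce.
    Hash sha256Hash = hashString(htSHA256, "example contents");   // placeholder input
    ContentAddress ca = FixedOutputHash {
        .method = FileIngestionMethod::Flat,
        .hash = sha256Hash,
    };
    assert(renderContentAddress(ca) == makeFixedOutputCA(FileIngestionMethod::Flat, sha256Hash));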
@@ -3714,7 +3714,7 @@ void DerivationGoal::registerOutputs()
         /* Check that fixed-output derivations produced the right
            outputs (i.e., the content hash should match the specified
            hash). */
-        std::string ca;
+        std::optional<ContentAddress> ca;
 
         if (fixedOutput) {
 
@@ -3764,7 +3764,10 @@ void DerivationGoal::registerOutputs()
                 else
                     assert(worker.store.parseStorePath(path) == dest);
 
-            ca = makeFixedOutputCA(i.second.hash->method, h2);
+            ca = FixedOutputHash {
+                .method = i.second.hash->method,
+                .hash = h2,
+            };
         }
 
         /* Get rid of all weird permissions. This also checks that

@@ -3839,7 +3842,10 @@ void DerivationGoal::registerOutputs()
         info.ca = ca;
         worker.store.signPathInfo(info);
 
-        if (!info.references.empty()) info.ca.clear();
+        if (!info.references.empty()) {
+            // FIXME don't we have an experimental feature for fixed output with references?
+            info.ca = {};
+        }
 
         infos.emplace(i.first, std::move(info));
     }

src/libstore/content-address.cc (new file, 85 lines)
@@ -0,0 +1,85 @@
+#include "content-address.hh"
+
+namespace nix {
+
+std::string FixedOutputHash::printMethodAlgo() const {
+    return makeFileIngestionPrefix(method) + printHashType(hash.type);
+}
+
+std::string makeFileIngestionPrefix(const FileIngestionMethod m) {
+    switch (m) {
+    case FileIngestionMethod::Flat:
+        return "";
+    case FileIngestionMethod::Recursive:
+        return "r:";
+    default:
+        throw Error("impossible, caught both cases");
+    }
+}
+
+std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
+{
+    return "fixed:"
+        + makeFileIngestionPrefix(method)
+        + hash.to_string(Base32, true);
+}
+
+// FIXME Put this somewhere?
+template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
+template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
+
+std::string renderContentAddress(ContentAddress ca) {
+    return std::visit(overloaded {
+        [](TextHash th) {
+            return "text:" + th.hash.to_string(Base32, true);
+        },
+        [](FixedOutputHash fsh) {
+            return makeFixedOutputCA(fsh.method, fsh.hash);
+        }
+    }, ca);
+}
+
+ContentAddress parseContentAddress(std::string_view rawCa) {
+    auto prefixSeparator = rawCa.find(':');
+    if (prefixSeparator != string::npos) {
+        auto prefix = string(rawCa, 0, prefixSeparator);
+        if (prefix == "text") {
+            auto hashTypeAndHash = rawCa.substr(prefixSeparator+1, string::npos);
+            Hash hash = Hash(string(hashTypeAndHash));
+            if (hash.type != htSHA256) {
+                throw Error("parseContentAddress: the text hash should have type SHA256");
+            }
+            return TextHash { hash };
+        } else if (prefix == "fixed") {
+            // This has to be an inverse of makeFixedOutputCA
+            auto methodAndHash = rawCa.substr(prefixSeparator+1, string::npos);
+            if (methodAndHash.substr(0,2) == "r:") {
+                std::string_view hashRaw = methodAndHash.substr(2,string::npos);
+                return FixedOutputHash {
+                    .method = FileIngestionMethod::Recursive,
+                    .hash = Hash(string(hashRaw)),
+                };
+            } else {
+                std::string_view hashRaw = methodAndHash;
+                return FixedOutputHash {
+                    .method = FileIngestionMethod::Flat,
+                    .hash = Hash(string(hashRaw)),
+                };
+            }
+        } else {
+            throw Error("parseContentAddress: format not recognized; has to be text or fixed");
+        }
+    } else {
+        throw Error("Not a content address because it lacks an appropriate prefix");
+    }
+};
+
+std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt) {
+    return rawCaOpt == "" ? std::optional<ContentAddress> {} : parseContentAddress(rawCaOpt);
+};
+
+std::string renderContentAddress(std::optional<ContentAddress> ca) {
+    return ca ? renderContentAddress(*ca) : "";
+}
+
+}

src/libstore/content-address.hh (new file, 56 lines)
@@ -0,0 +1,56 @@
+#pragma once
+
+#include <variant>
+#include "hash.hh"
+
+namespace nix {
+
+enum struct FileIngestionMethod : uint8_t {
+    Flat = false,
+    Recursive = true
+};
+
+struct TextHash {
+    Hash hash;
+};
+
+/// Pair of a hash, and how the file system was ingested
+struct FixedOutputHash {
+    FileIngestionMethod method;
+    Hash hash;
+    std::string printMethodAlgo() const;
+};
+
+/*
+  We've accumulated several types of content-addressed paths over the years;
+  fixed-output derivations support multiple hash algorithms and serialisation
+  methods (flat file vs NAR). Thus, ‘ca’ has one of the following forms:
+
+  * ‘text:sha256:<sha256 hash of file contents>’: For paths
+    computed by makeTextPath() / addTextToStore().
+
+  * ‘fixed:<r?>:<ht>:<h>’: For paths computed by
+    makeFixedOutputPath() / addToStore().
+*/
+typedef std::variant<
+    TextHash, // for paths computed by makeTextPath() / addTextToStore
+    FixedOutputHash // for path computed by makeFixedOutputPath
+> ContentAddress;
+
+/* Compute the prefix to the hash algorithm which indicates how the files were
+   ingested. */
+std::string makeFileIngestionPrefix(const FileIngestionMethod m);
+
+/* Compute the content-addressability assertion (ValidPathInfo::ca)
+   for paths created by makeFixedOutputPath() / addToStore(). */
+std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);
+
+std::string renderContentAddress(ContentAddress ca);
+
+std::string renderContentAddress(std::optional<ContentAddress> ca);
+
+ContentAddress parseContentAddress(std::string_view rawCa);
+
+std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt);
+
+}
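
Taken together, the new header gives `ValidPathInfo::ca` a real type instead of an ad-hoc string. A small usage sketch, not part of the commit (the hash literal is illustrative only), showing the round trip between the structured value and its textual form:

    // Hypothetical round trip: parse a textual content address, inspect the
    // variant alternative, and render it back to the same string.
    ContentAddress ca = parseContentAddress("fixed:r:sha256:1b8m03r63zqhnjf7l5wnldhh7c134ap11f9zlr1r062w8zy0gwwl");
    if (auto fsh = std::get_if<FixedOutputHash>(&ca))
        assert(fsh->method == FileIngestionMethod::Recursive);
    assert(renderContentAddress(ca) == "fixed:r:sha256:1b8m03r63zqhnjf7l5wnldhh7c134ap11f9zlr1r062w8zy0gwwl");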
@@ -652,7 +652,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
             if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
                 to << info->ultimate
                    << info->sigs
-                   << info->ca;
+                   << renderContentAddress(info->ca);
             }
         } else {
             assert(GET_PROTOCOL_MINOR(clientVersion) >= 17);

@@ -710,7 +710,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         info.references = readStorePaths<StorePathSet>(*store, from);
         from >> info.registrationTime >> info.narSize >> info.ultimate;
         info.sigs = readStrings<StringSet>(from);
-        from >> info.ca >> repair >> dontCheckSigs;
+        info.ca = parseContentAddressOpt(readString(from));
+        from >> repair >> dontCheckSigs;
         if (!trusted && dontCheckSigs)
            dontCheckSigs = false;
         if (!trusted)
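
The daemon protocol itself is unchanged by this refactor: the content address still crosses the wire as a string, with the empty string standing for "no content address". A hedged sketch of that convention (the `writeCa`/`readCa` wrappers are illustrative, only the render/parse functions come from the diff):

    // Writer side: an absent CA serialises as "".
    void writeCa(Sink & to, const std::optional<ContentAddress> & ca) {
        to << renderContentAddress(ca);          // "" when ca == std::nullopt
    }

    // Reader side: "" parses back to std::nullopt.
    std::optional<ContentAddress> readCa(Source & from) {
        return parseContentAddressOpt(readString(from));
    }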
@@ -8,11 +8,6 @@
 
 namespace nix {
 
-std::string DerivationOutputHash::printMethodAlgo() const {
-    return makeFileIngestionPrefix(method) + printHashType(hash.type);
-}
-
-
 const StorePath & BasicDerivation::findOutput(const string & id) const
 {
     auto i = outputs.find(id);

@@ -113,7 +108,7 @@ static DerivationOutput parseDerivationOutput(const Store & store, istringstream
     expect(str, ","); const auto hash = parseString(str);
     expect(str, ")");
 
-    std::optional<DerivationOutputHash> fsh;
+    std::optional<FixedOutputHash> fsh;
     if (hashAlgo != "") {
         auto method = FileIngestionMethod::Flat;
         if (string(hashAlgo, 0, 2) == "r:") {

@@ -121,7 +116,7 @@ static DerivationOutput parseDerivationOutput(const Store & store, istringstream
             hashAlgo = string(hashAlgo, 2);
         }
         const HashType hashType = parseHashType(hashAlgo);
-        fsh = DerivationOutputHash {
+        fsh = FixedOutputHash {
             .method = std::move(method),
             .hash = Hash(hash, hashType),
         };

@@ -411,7 +406,7 @@ static DerivationOutput readDerivationOutput(Source & in, const Store & store)
     auto hashAlgo = readString(in);
     const auto hash = readString(in);
 
-    std::optional<DerivationOutputHash> fsh;
+    std::optional<FixedOutputHash> fsh;
     if (hashAlgo != "") {
         auto method = FileIngestionMethod::Flat;
         if (string(hashAlgo, 0, 2) == "r:") {

@@ -419,7 +414,7 @@ static DerivationOutput readDerivationOutput(Source & in, const Store & store)
             hashAlgo = string(hashAlgo, 2);
         }
         const HashType hashType = parseHashType(hashAlgo);
-        fsh = DerivationOutputHash {
+        fsh = FixedOutputHash {
             .method = std::move(method),
             .hash = Hash(hash, hashType),
         };

@@ -1,8 +1,9 @@
 #pragma once
 
+#include "path.hh"
 #include "types.hh"
 #include "hash.hh"
-#include "store-api.hh"
+#include "content-address.hh"
 
 #include <map>
 
@@ -12,18 +13,10 @@ namespace nix {
 
 /* Abstract syntax of derivations. */
 
-/// Pair of a hash, and how the file system was ingested
-struct DerivationOutputHash {
-    FileIngestionMethod method;
-    Hash hash;
-    std::string printMethodAlgo() const;
-};
-
 struct DerivationOutput
 {
     StorePath path;
-    std::optional<DerivationOutputHash> hash; /* hash used for expected hash computation */
-    void parseHashInfo(FileIngestionMethod & recursive, Hash & hash) const;
+    std::optional<FixedOutputHash> hash; /* hash used for expected hash computation */
 };
 
 typedef std::map<string, DerivationOutput> DerivationOutputs;
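
After this hunk a derivation output's optional fixed-output information lives in the shared `FixedOutputHash` type, so callers can query it directly rather than going through the removed `parseHashInfo` helper. An illustrative access pattern, not taken from the commit (the helper name and messages are assumptions):

    // Sketch: inspect a derivation output's fixed-output hash, if any.
    void describeOutput(const DerivationOutput & out)
    {
        if (out.hash)
            // e.g. "r:sha256" for a recursively ingested SHA-256 output
            printInfo("fixed-output: %s", out.hash->printMethodAlgo());
        else
            printInfo("output is not fixed-output");
    }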
@@ -76,6 +69,7 @@ struct Derivation : BasicDerivation
 
 class Store;
 
+enum RepairFlag : bool { NoRepair = false, Repair = true };
 
 /* Write a derivation to the Nix store, and return its path. */
 StorePath writeDerivation(ref<Store> store,

@@ -113,8 +113,8 @@ struct LegacySSHStore : public Store
 
         if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
             auto s = readString(conn->from);
-            info->narHash = s.empty() ? std::optional<Hash>{} : Hash(s);
-            conn->from >> info->ca;
+            info->narHash = s.empty() ? std::optional<Hash>{} : Hash{s};
+            info->ca = parseContentAddressOpt(readString(conn->from));
             info->sigs = readStrings<StringSet>(conn->from);
         }
 
@@ -146,7 +146,7 @@ struct LegacySSHStore : public Store
            << info.narSize
            << info.ultimate
            << info.sigs
-           << info.ca;
+           << renderContentAddress(info.ca);
         try {
             copyNAR(source, conn->to);
         } catch (...) {

@@ -580,7 +580,7 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
 uint64_t LocalStore::addValidPath(State & state,
     const ValidPathInfo & info, bool checkOutputs)
 {
-    if (info.ca != "" && !info.isContentAddressed(*this))
+    if (info.ca.has_value() && !info.isContentAddressed(*this))
         throw Error("cannot add path '%s' to the Nix store because it claims to be content-addressed but isn't",
             printStorePath(info.path));
 
@@ -592,7 +592,7 @@ uint64_t LocalStore::addValidPath(State & state,
         (info.narSize, info.narSize != 0)
         (info.ultimate ? 1 : 0, info.ultimate)
         (concatStringsSep(" ", info.sigs), !info.sigs.empty())
-        (info.ca, !info.ca.empty())
+        (renderContentAddress(info.ca), (bool) info.ca)
         .exec();
     uint64_t id = sqlite3_last_insert_rowid(state.db);
 
@@ -666,7 +666,7 @@ void LocalStore::queryPathInfoUncached(const StorePath & path,
         if (s) info->sigs = tokenizeString<StringSet>(s, " ");
 
         s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 7);
-        if (s) info->ca = s;
+        if (s) info->ca = parseContentAddressOpt(s);
 
         /* Get the references. */
         auto useQueryReferences(state->stmtQueryReferences.use()(info->id));

@@ -689,7 +689,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
         (info.narHash->to_string(Base16, true))
         (info.ultimate ? 1 : 0, info.ultimate)
         (concatStringsSep(" ", info.sigs), !info.sigs.empty())
-        (info.ca, !info.ca.empty())
+        (renderContentAddress(info.ca), (bool) info.ca)
         (printStorePath(info.path))
         .exec();
 }

@@ -985,15 +985,15 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 
         deletePath(realPath);
 
-        if (info.ca != "" &&
-            !((hasPrefix(info.ca, "text:") && !info.references.count(info.path))
-              || info.references.empty()))
+        // text hashing has long been allowed to have non-self-references because it is used for drv files.
+        bool refersToSelf = info.references.count(info.path) > 0;
+        if (info.ca.has_value() && !info.references.empty() && !(std::holds_alternative<TextHash>(*info.ca) && !refersToSelf))
             settings.requireExperimentalFeature("ca-references");
 
         /* While restoring the path from the NAR, compute the hash
            of the NAR. */
         std::unique_ptr<AbstractHashSink> hashSink;
-        if (info.ca == "" || !info.references.count(info.path))
+        if (!info.ca.has_value() || !info.references.count(info.path))
             hashSink = std::make_unique<HashSink>(htSHA256);
         else
             hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));
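
The rewritten guard preserves the old behaviour in structured form: the "ca-references" experimental feature is only required for a content-addressed path that has references, unless the path is text-hashed (as .drv files are) and none of those references point back at the path itself. A hedged restatement of that predicate (the function name is local to this sketch):

    // Sketch: when must "ca-references" be enabled for a ValidPathInfo?
    bool needsCaReferencesFeature(const ValidPathInfo & info)
    {
        if (!info.ca.has_value()) return false;        // not content-addressed
        if (info.references.empty()) return false;     // no references at all
        bool refersToSelf = info.references.count(info.path) > 0;
        bool textWithoutSelfRef =
            std::holds_alternative<TextHash>(*info.ca) && !refersToSelf;
        return !textWithoutSelfRef;                    // text CAs without a self-reference are exempt
    }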
@@ -1079,7 +1079,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
             ValidPathInfo info(dstPath);
             info.narHash = hash.first;
             info.narSize = hash.second;
-            info.ca = makeFixedOutputCA(method, h);
+            info.ca = FixedOutputHash { .method = method, .hash = h };
             registerValidPath(info);
         }
 
@@ -1143,7 +1143,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
             info.narHash = narHash;
             info.narSize = sink.s->size();
             info.references = references;
-            info.ca = "text:" + hash.to_string(Base32, true);
+            info.ca = TextHash { .hash = hash };
             registerValidPath(info);
         }
 
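
This is the only producer of the `text:` form: paths created by `addTextToStore` (notably .drv files) now record a `TextHash`, and the historical string is reproduced on demand. A small illustrative check, not from the commit (the input string is a placeholder):

    // Sketch: a TextHash renders to the legacy "text:sha256:..." assertion.
    Hash h = hashString(htSHA256, "example .drv contents");   // placeholder
    ContentAddress ca = TextHash { .hash = h };
    assert(renderContentAddress(ca) == "text:" + h.to_string(Base32, true));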
@@ -1254,7 +1254,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
                 printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i));
 
                 std::unique_ptr<AbstractHashSink> hashSink;
-                if (info->ca == "" || !info->references.count(info->path))
+                if (!info->ca || !info->references.count(info->path))
                     hashSink = std::make_unique<HashSink>(info->narHash->type);
                 else
                     hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));

@@ -203,7 +203,7 @@ public:
                 narInfo->deriver = StorePath(queryNAR.getStr(9));
             for (auto & sig : tokenizeString<Strings>(queryNAR.getStr(10), " "))
                 narInfo->sigs.insert(sig);
-            narInfo->ca = queryNAR.getStr(11);
+            narInfo->ca = parseContentAddressOpt(queryNAR.getStr(11));
 
             return {oValid, narInfo};
         });

@@ -237,7 +237,7 @@ public:
                 (concatStringsSep(" ", info->shortRefs()))
                 (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)
                 (concatStringsSep(" ", info->sigs))
-                (info->ca)
+                (renderContentAddress(info->ca))
                 (time(0)).exec();
 
         } else {

@@ -66,8 +66,9 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
         else if (name == "Sig")
             sigs.insert(value);
         else if (name == "CA") {
-            if (!ca.empty()) throw corrupt();
-            ca = value;
+            if (!value.empty()) throw corrupt();
+            // FIXME: allow blank ca or require skipping field?
+            ca = parseContentAddressOpt(value);
         }
 
         pos = eol + 1;

@@ -103,8 +104,8 @@ std::string NarInfo::to_string(const Store & store) const
     for (auto sig : sigs)
         res += "Sig: " + sig + "\n";
 
-    if (!ca.empty())
-        res += "CA: " + ca + "\n";
+    if (ca)
+        res += "CA: " + renderContentAddress(*ca) + "\n";
 
     return res;
 }

@@ -1,4 +1,4 @@
-#include "derivations.hh"
+#include "store-api.hh"
 
 #include <nlohmann/json_fwd.hpp>
 

@@ -1,5 +1,6 @@
 #pragma once
 
+#include "content-address.hh"
 #include "types.hh"
 
 namespace nix {

@@ -65,11 +66,6 @@ typedef std::vector<StorePath> StorePaths;
 /* Extension of derivations in the Nix store. */
 const std::string drvExtension = ".drv";
 
-enum struct FileIngestionMethod : uint8_t {
-    Flat = false,
-    Recursive = true
-};
-
 struct StorePathWithOutputs
 {
     StorePath path;

@@ -381,7 +381,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
         if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
             conn->from >> info->ultimate;
             info->sigs = readStrings<StringSet>(conn->from);
-            conn->from >> info->ca;
+            info->ca = parseContentAddressOpt(readString(conn->from));
         }
     }
     callback(std::move(info));

@@ -465,7 +465,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
                  << info.narHash->to_string(Base16, false);
         writeStorePaths(*this, conn->to, info.references);
         conn->to << info.registrationTime << info.narSize
-                 << info.ultimate << info.sigs << info.ca
+                 << info.ultimate << info.sigs << renderContentAddress(info.ca)
                  << repair << !checkSigs;
         bool tunnel = GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21;
         if (!tunnel) copyNAR(source, conn->to);

@@ -471,8 +471,8 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
                 jsonRefs.elem(printStorePath(ref));
         }
 
-        if (info->ca != "")
-            jsonPath.attr("ca", info->ca);
+        if (info->ca)
+            jsonPath.attr("ca", renderContentAddress(info->ca));
 
         std::pair<uint64_t, uint64_t> closureSizes;
 
@@ -757,41 +757,35 @@ void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey)
     sigs.insert(secretKey.signDetached(fingerprint(store)));
 }
 
+// FIXME Put this somewhere?
+template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
+template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
+
 bool ValidPathInfo::isContentAddressed(const Store & store) const
 {
-    auto warn = [&]() {
-        logWarning(
-            ErrorInfo{
-                .name = "Path not content-addressed",
-                .hint = hintfmt("path '%s' claims to be content-addressed but isn't", store.printStorePath(path))
-            });
-    };
-
-    if (hasPrefix(ca, "text:")) {
-        Hash hash(ca.substr(5));
-        if (store.makeTextPath(path.name(), hash, references) == path)
-            return true;
-        else
-            warn();
-    }
-
-    else if (hasPrefix(ca, "fixed:")) {
-        FileIngestionMethod recursive { ca.compare(6, 2, "r:") == 0 };
-        Hash hash(ca.substr(recursive == FileIngestionMethod::Recursive ? 8 : 6));
-        auto refs = references;
-        bool hasSelfReference = false;
-        if (refs.count(path)) {
-            hasSelfReference = true;
-            refs.erase(path);
-        }
-        if (store.makeFixedOutputPath(recursive, hash, path.name(), refs, hasSelfReference) == path)
-            return true;
-        else
-            warn();
-    }
-
-    return false;
+    if (! ca) return false;
+
+    auto caPath = std::visit(overloaded {
+        [&](TextHash th) {
+            return store.makeTextPath(path.name(), th.hash, references);
+        },
+        [&](FixedOutputHash fsh) {
+            auto refs = references;
+            bool hasSelfReference = false;
+            if (refs.count(path)) {
+                hasSelfReference = true;
+                refs.erase(path);
+            }
+            return store.makeFixedOutputPath(fsh.method, fsh.hash, path.name(), refs, hasSelfReference);
+        }
+    }, *ca);
+
+    bool res = caPath == path;
+
+    if (!res)
+        printError("warning: path '%s' claims to be content-addressed but isn't", store.printStorePath(path));
+
+    return res;
 }
 
 
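
The `overloaded` helper used just above is the standard C++17 trick for handing `std::visit` one lambda per variant alternative. A self-contained illustration of the idiom outside of Nix (everything below is example-only):

    #include <string>
    #include <variant>

    // Same helper as in the diff, in miniature.
    template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
    template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

    std::string describe(const std::variant<int, std::string> & v)
    {
        return std::visit(overloaded {
            [](int i)                 { return "int: " + std::to_string(i); },
            [](const std::string & s) { return "string: " + s; },
        }, v);
    }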
@@ -822,25 +816,6 @@ Strings ValidPathInfo::shortRefs() const
 }
 
 
-std::string makeFileIngestionPrefix(const FileIngestionMethod m) {
-    switch (m) {
-    case FileIngestionMethod::Flat:
-        return "";
-    case FileIngestionMethod::Recursive:
-        return "r:";
-    default:
-        throw Error("impossible, caught both cases");
-    }
-}
-
-std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
-{
-    return "fixed:"
-        + makeFileIngestionPrefix(method)
-        + hash.to_string(Base32, true);
-}
-
-
 }
 
 

@@ -2,12 +2,14 @@
 
 #include "path.hh"
 #include "hash.hh"
+#include "content-address.hh"
 #include "serialise.hh"
 #include "crypto.hh"
 #include "lru-cache.hh"
 #include "sync.hh"
 #include "globals.hh"
 #include "config.hh"
+#include "derivations.hh"
 
 #include <atomic>
 #include <limits>

@@ -17,6 +19,7 @@
 #include <memory>
 #include <string>
 #include <chrono>
+#include <variant>
 
 
 namespace nix {

@@ -31,15 +34,12 @@ MakeError(SubstituterDisabled, Error);
 MakeError(NotInStore, Error);
 
 
-struct BasicDerivation;
-struct Derivation;
 class FSAccessor;
 class NarInfoDiskCache;
 class Store;
 class JSONPlaceholder;
 
 
-enum RepairFlag : bool { NoRepair = false, Repair = true };
 enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true };
 enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true };
 enum AllowInvalidFlag : bool { DisallowInvalid = false, AllowInvalid = true };

@@ -111,7 +111,6 @@ struct SubstitutablePathInfo
 
 typedef std::map<StorePath, SubstitutablePathInfo> SubstitutablePathInfos;
 
-
 struct ValidPathInfo
 {
     StorePath path;

@@ -141,21 +140,11 @@ struct ValidPathInfo
        that a particular output path was produced by a derivation; the
        path then implies the contents.)
 
-       Ideally, the content-addressability assertion would just be a
-       Boolean, and the store path would be computed from
-       the name component, ‘narHash’ and ‘references’. However,
-       1) we've accumulated several types of content-addressed paths
-       over the years; and 2) fixed-output derivations support
-       multiple hash algorithms and serialisation methods (flat file
-       vs NAR). Thus, ‘ca’ has one of the following forms:
-
-       * ‘text:sha256:<sha256 hash of file contents>’: For paths
-         computed by makeTextPath() / addTextToStore().
-
-       * ‘fixed:<r?>:<ht>:<h>’: For paths computed by
-         makeFixedOutputPath() / addToStore().
+       Ideally, the content-addressability assertion would just be a Boolean,
+       and the store path would be computed from the name component, ‘narHash’
+       and ‘references’. However, we support many types of content addresses.
     */
-    std::string ca;
+    std::optional<ContentAddress> ca;
 
     bool operator == (const ValidPathInfo & i) const
     {

@@ -840,15 +829,6 @@ std::optional<ValidPathInfo> decodeValidPathInfo(
     std::istream & str,
     bool hashGiven = false);
 
-/* Compute the prefix to the hash algorithm which indicates how the files were
-   ingested. */
-std::string makeFileIngestionPrefix(const FileIngestionMethod m);
-
-/* Compute the content-addressability assertion (ValidPathInfo::ca)
-   for paths created by makeFixedOutputPath() / addToStore(). */
-std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);
-
-
 /* Split URI into protocol+hierarchy part and its parameter set. */
 std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);
 

@@ -1,6 +1,7 @@
 #pragma once
 
 #include <boost/format.hpp>
+#include <boost/algorithm/string/replace.hpp>
 #include <string>
 #include "ansicolor.hh"
 

@@ -103,7 +104,9 @@ class hintformat
 public:
     hintformat(const string &format) :fmt(format)
     {
-        fmt.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit);
+        fmt.exceptions(boost::io::all_error_bits ^
+            boost::io::too_many_args_bit ^
+            boost::io::too_few_args_bit);
     }
 
     hintformat(const hintformat &hf)

@@ -117,6 +120,13 @@ public:
         return *this;
     }
 
+    template<class T>
+    hintformat& operator%(const normaltxt<T> &value)
+    {
+        fmt % value.value;
+        return *this;
+    }
+
     std::string str() const
     {
         return fmt.str();

@@ -136,4 +146,9 @@ inline hintformat hintfmt(const std::string & fs, const Args & ... args)
     return f;
 }
 
+inline hintformat hintfmt(std::string plain_string)
+{
+    // we won't be receiving any args in this case, so just print the original string
+    return hintfmt("%s", normaltxt(plain_string));
+}
 }
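
The new overload exists so that hints built from arbitrary strings, which may themselves contain '%', no longer trip Boost.Format, and the relaxed exception mask tolerates both surplus and missing arguments. A hedged illustration of the intended behaviour, consistent with the unit tests added later in this commit:

    // Sketch: a literal '%' in a plain string survives untouched, because the
    // whole string is passed as a single normaltxt argument to "%s".
    auto h1 = hintfmt("this is 100%s correct!");
    assert(h1.str() == "this is 100%s correct!");

    // A missing argument no longer throws; the unfilled placeholder is simply dropped.
    auto h2 = hintfmt("only one arg %1% %2%", "fulfilled");
    (void) h2.str();   // "only one arg fulfilled " (with ANSI colouring around the value)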
@@ -227,7 +227,7 @@ Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht)
         if (!ht)
             throw BadHash("empty hash requires explicit hash type");
         Hash h(*ht);
-        warn("found empty hash, assuming '%s'", h.to_string(Base::SRI, true));
+        warn("found empty hash, assuming '%s'", h.to_string(SRI, true));
         return h;
     } else
         return Hash(hashStr, ht);

@@ -1,6 +1,7 @@
 #include "logging.hh"
 #include "nixexpr.hh"
 #include "util.hh"
+#include <fstream>
 
 #include <gtest/gtest.h>
 

@@ -42,7 +43,7 @@ namespace nix {
             logger->logEI(ei);
             auto str = testing::internal::GetCapturedStderr();
 
-            ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0minitial error\x1B[0m; subsequent error message.\n");
+            ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\ninitial error; subsequent error message.\n");
         }
 
     }

@@ -60,8 +61,7 @@ namespace nix {
             logError(e.info());
             auto str = testing::internal::GetCapturedStderr();
 
-            ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0mstatting file\x1B[0m: \x1B[33;1mBad file descriptor\x1B[0m\n");
-
+            ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nstatting file: \x1B[33;1mBad file descriptor\x1B[0m\n");
 
         }
     }

@@ -69,9 +69,9 @@ namespace nix {
         testing::internal::CaptureStderr();
 
         logger->logEI({ .level = lvlInfo,
                 .name = "Info name",
                 .description = "Info description",
             });
 
         auto str = testing::internal::GetCapturedStderr();
         ASSERT_STREQ(str.c_str(), "\x1B[32;1minfo:\x1B[0m\x1B[34;1m --- Info name --- error-unit-test\x1B[0m\nInfo description\n");

@@ -85,7 +85,7 @@ namespace nix {
         logger->logEI({ .level = lvlTalkative,
                 .name = "Talkative name",
                 .description = "Talkative description",
             });
 
         auto str = testing::internal::GetCapturedStderr();
         ASSERT_STREQ(str.c_str(), "\x1B[32;1mtalk:\x1B[0m\x1B[34;1m --- Talkative name --- error-unit-test\x1B[0m\nTalkative description\n");

@@ -99,7 +99,7 @@ namespace nix {
         logger->logEI({ .level = lvlChatty,
                 .name = "Chatty name",
                 .description = "Talkative description",
             });
 
         auto str = testing::internal::GetCapturedStderr();
         ASSERT_STREQ(str.c_str(), "\x1B[32;1mchat:\x1B[0m\x1B[34;1m --- Chatty name --- error-unit-test\x1B[0m\nTalkative description\n");

@@ -113,7 +113,7 @@ namespace nix {
         logger->logEI({ .level = lvlDebug,
                 .name = "Debug name",
                 .description = "Debug description",
             });
 
         auto str = testing::internal::GetCapturedStderr();
         ASSERT_STREQ(str.c_str(), "\x1B[33;1mdebug:\x1B[0m\x1B[34;1m --- Debug name --- error-unit-test\x1B[0m\nDebug description\n");

@@ -127,7 +127,7 @@ namespace nix {
         logger->logEI({ .level = lvlVomit,
                 .name = "Vomit name",
                 .description = "Vomit description",
             });
 
         auto str = testing::internal::GetCapturedStderr();
         ASSERT_STREQ(str.c_str(), "\x1B[32;1mvomit:\x1B[0m\x1B[34;1m --- Vomit name --- error-unit-test\x1B[0m\nVomit description\n");

@@ -144,7 +144,7 @@ namespace nix {
         logError({
                 .name = "name",
                 .description = "error description",
             });
 
         auto str = testing::internal::GetCapturedStderr();
         ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n");

@@ -160,13 +160,13 @@ namespace nix {
                 .name = "error name",
                 .description = "error with code lines",
                 .hint = hintfmt("this hint has %1% templated %2%!!",
                     "yellow",
                     "values"),
                 .nixCode = NixCode {
                     .errPos = Pos(problem_file, 40, 13),
                     .prevLineOfCode = "previous line of code",
                     .errLineOfCode = "this is the problem line of code",
                     .nextLineOfCode = "next line of code",
                 }});
 
 

@@ -183,10 +183,10 @@ namespace nix {
                 .name = "error name",
                 .description = "error without any code lines.",
                 .hint = hintfmt("this hint has %1% templated %2%!!",
                     "yellow",
                     "values"),
                 .nixCode = NixCode {
                     .errPos = Pos(problem_file, 40, 13)
                 }});
 
         auto str = testing::internal::GetCapturedStderr();

@@ -202,7 +202,7 @@ namespace nix {
                 .name = "error name",
                 .hint = hintfmt("hint %1%", "only"),
                 .nixCode = NixCode {
                     .errPos = Pos(problem_file, 40, 13)
                 }});
 
         auto str = testing::internal::GetCapturedStderr();

@@ -218,10 +218,10 @@ namespace nix {
         testing::internal::CaptureStderr();
 
         logWarning({
                 .name = "name",
                 .description = "error description",
                 .hint = hintfmt("there was a %1%", "warning"),
             });
 
         auto str = testing::internal::GetCapturedStderr();
         ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n");

@@ -238,13 +238,13 @@ namespace nix {
                 .name = "warning name",
                 .description = "warning description",
                 .hint = hintfmt("this hint has %1% templated %2%!!",
                     "yellow",
                     "values"),
                 .nixCode = NixCode {
                     .errPos = Pos(problem_file, 40, 13),
                     .prevLineOfCode = std::nullopt,
                     .errLineOfCode = "this is the problem line of code",
                     .nextLineOfCode = std::nullopt
                 }});
 
 
@@ -252,4 +252,41 @@ namespace nix {
         ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nwarning description\n\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
     }
 
+    /* ----------------------------------------------------------------------------
+     * hintfmt
+     * --------------------------------------------------------------------------*/
+
+    TEST(hintfmt, percentStringWithoutArgs) {
+
+        const char *teststr = "this is 100%s correct!";
+
+        ASSERT_STREQ(
+            hintfmt(teststr).str().c_str(),
+            teststr);
+
+    }
+
+    TEST(hintfmt, fmtToHintfmt) {
+
+        ASSERT_STREQ(
+            hintfmt(fmt("the color of this this text is %1%", "not yellow")).str().c_str(),
+            "the color of this this text is not yellow");
+
+    }
+
+    TEST(hintfmt, tooFewArguments) {
+
+        ASSERT_STREQ(
+            hintfmt("only one arg %1% %2%", "fulfilled").str().c_str(),
+            "only one arg " ANSI_YELLOW "fulfilled" ANSI_NORMAL " ");
+
+    }
+
+    TEST(hintfmt, tooManyArguments) {
+
+        ASSERT_STREQ(
+            hintfmt("what about this %1% %2%", "%3%", "one", "two").str().c_str(),
+            "what about this " ANSI_YELLOW "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL);
+
+    }
 }

@@ -864,7 +864,9 @@ static void opServe(Strings opFlags, Strings opArgs)
                 out << info->narSize // downloadSize
                     << info->narSize;
                 if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
-                    out << (info->narHash ? info->narHash->to_string(Base32, true) : "") << info->ca << info->sigs;
+                    out << (info->narHash ? info->narHash->to_string(Base32, true) : "")
+                        << renderContentAddress(info->ca)
+                        << info->sigs;
             } catch (InvalidPath &) {
             }
         }

@@ -952,7 +954,7 @@ static void opServe(Strings opFlags, Strings opArgs)
             info.references = readStorePaths<StorePathSet>(*store, in);
             in >> info.registrationTime >> info.narSize >> info.ultimate;
             info.sigs = readStrings<StringSet>(in);
-            in >> info.ca;
+            info.ca = parseContentAddressOpt(readString(in));
 
             if (info.narSize == 0)
                 throw Error("narInfo is too old and missing the narSize field");

@@ -48,7 +48,10 @@ struct CmdAddToStore : MixDryRun, StoreCommand
         ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, *namePart));
         *info.narHash = narHash;
         info.narSize = sink.s->size();
-        info.ca = makeFixedOutputCA(FileIngestionMethod::Recursive, *info.narHash);
+        info.ca = std::optional { FixedOutputHash {
+            .method = FileIngestionMethod::Recursive,
+            .hash = *info.narHash,
+        } };
 
         if (!dryRun) {
             auto source = StringSource { *sink.s };

@@ -137,7 +137,7 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
     auto shellOutPath = store->makeOutputPath("out", h, drvName);
     drv.outputs.insert_or_assign("out", DerivationOutput {
         .path = shellOutPath,
-        .hash = DerivationOutputHash {
+        .hash = FixedOutputHash {
            .method = FileIngestionMethod::Flat,
            .hash = Hash { htSHA256 },
         },

@@ -1,5 +1,6 @@
 #include "command.hh"
 #include "hash.hh"
+#include "content-address.hh"
 #include "legacy.hh"
 #include "shared.hh"
 #include "references.hh"

@@ -82,7 +82,10 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
             if (hasSelfReference) info.references.insert(info.path);
             info.narHash = narHash;
             info.narSize = sink.s->size();
-            info.ca = makeFixedOutputCA(FileIngestionMethod::Recursive, *info.narHash);
+            info.ca = FixedOutputHash {
+                .method = FileIngestionMethod::Recursive,
+                .hash = *info.narHash,
+            };
 
             if (!json)
                 printInfo("rewrote '%s' to '%s'", pathS, store->printStorePath(info.path));

@@ -115,7 +115,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
                 std::cout << '\t';
                 Strings ss;
                 if (info->ultimate) ss.push_back("ultimate");
-                if (info->ca != "") ss.push_back("ca:" + info->ca);
+                if (info->ca) ss.push_back("ca:" + renderContentAddress(*info->ca));
                 for (auto & sig : info->sigs) ss.push_back(sig);
                 std::cout << concatStringsSep(" ", ss);
             }

@@ -87,7 +87,7 @@ struct CmdVerify : StorePathsCommand
                 if (!noContents) {
 
                     std::unique_ptr<AbstractHashSink> hashSink;
-                    if (info->ca == "")
+                    if (!info->ca)
                         hashSink = std::make_unique<HashSink>(info->narHash->type);
                     else
                         hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));