mirror of
https://github.com/privatevoid-net/nix-super.git
synced 2024-11-21 21:46:15 +02:00
Add setting to warn about copying/hashing large paths
This is useful for diagnosing whether an evaluation is copying large paths to the store. Example: $ nix build .#packages.x86_64-linux.default --large-path-warning-threshold 1000000 warning: copied large path '/home/eelco/Dev/nix-master/' to the store (6271792 bytes) warning: copied large path '«github:NixOS/nixpkgs/b550fe4b4776908ac2a861124307045f8e717c8e?narHash=sha256-7kkJQd4rZ%2BvFrzWu8sTRtta5D1kBG0LSRYAfhtmMlSo%3D»/' to the store (155263768 bytes) warning: copied large path '«github:libgit2/libgit2/45fd9ed7ae1a9b74b957ef4f337bc3c8b3df01b5?narHash=sha256-oX4Z3S9WtJlwvj0uH9HlYcWv%2Bx1hqp8mhXl7HsLu2f0%3D»/' to the store (22175416 bytes) warning: copied large path '/nix/store/z985088mcd6w23qwdlirsinnyzayagki-source' to the store (5885872 bytes)
This commit is contained in:
parent
d8559cad8d
commit
dbe1b51580
9 changed files with 54 additions and 15 deletions
|
@ -258,7 +258,7 @@ hashPath(char * algo, int base32, char * path)
|
|||
try {
|
||||
Hash h = hashPath(
|
||||
PosixSourceAccessor::createAtRoot(path),
|
||||
FileIngestionMethod::Recursive, parseHashAlgo(algo));
|
||||
FileIngestionMethod::Recursive, parseHashAlgo(algo)).first;
|
||||
auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
|
||||
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
||||
} catch (Error & e) {
|
||||
|
|
|
@ -453,7 +453,7 @@ StorePath BinaryCacheStore::addToStore(
|
|||
non-recursive+sha256 so we can just use the default
|
||||
implementation of this method in terms of addToStoreFromDump. */
|
||||
|
||||
auto h = hashPath(path, method.getFileIngestionMethod(), hashAlgo, filter);
|
||||
auto h = hashPath(path, method.getFileIngestionMethod(), hashAlgo, filter).first;
|
||||
|
||||
auto source = sinkToSource([&](Sink & sink) {
|
||||
path.dumpPath(sink, filter);
|
||||
|
|
|
@ -1262,6 +1262,16 @@ public:
|
|||
store paths of the latest Nix release.
|
||||
)"
|
||||
};
|
||||
|
||||
Setting<uint64_t> largePathWarningThreshold{
|
||||
this,
|
||||
std::numeric_limits<uint64_t>::max(),
|
||||
"large-path-warning-threshold",
|
||||
R"(
|
||||
Warn when copying a path larger than this number of bytes to the Nix store
|
||||
(as determined by its NAR serialisation).
|
||||
)"
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
|
|
|
@ -1272,7 +1272,7 @@ StorePath LocalStore::addToStoreFromDump(
|
|||
? dumpHash
|
||||
: hashPath(
|
||||
PosixSourceAccessor::createAtRoot(tempPath),
|
||||
hashMethod.getFileIngestionMethod(), hashAlgo),
|
||||
hashMethod.getFileIngestionMethod(), hashAlgo).first,
|
||||
{
|
||||
.others = references,
|
||||
// caller is not capable of creating a self-reference, because this is content-addressed without modulus
|
||||
|
@ -1412,7 +1412,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
|
|||
PosixSourceAccessor accessor;
|
||||
std::string hash = hashPath(
|
||||
PosixSourceAccessor::createAtRoot(link.path()),
|
||||
FileIngestionMethod::Recursive, HashAlgorithm::SHA256).to_string(HashFormat::Nix32, false);
|
||||
FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false);
|
||||
if (hash != name.string()) {
|
||||
printError("link '%s' was modified! expected hash '%s', got '%s'",
|
||||
link.path(), name, hash);
|
||||
|
|
|
@ -169,7 +169,9 @@ std::pair<StorePath, Hash> StoreDirConfig::computeStorePath(
|
|||
const StorePathSet & references,
|
||||
PathFilter & filter) const
|
||||
{
|
||||
auto h = hashPath(path, method.getFileIngestionMethod(), hashAlgo, filter);
|
||||
auto [h, size] = hashPath(path, method.getFileIngestionMethod(), hashAlgo, filter);
|
||||
if (size && *size >= settings.largePathWarningThreshold)
|
||||
warn("hashed large path '%s' (%d bytes)", path, *size);
|
||||
return {
|
||||
makeFixedOutputPathFromCA(
|
||||
name,
|
||||
|
@ -210,7 +212,11 @@ StorePath Store::addToStore(
|
|||
auto source = sinkToSource([&](Sink & sink) {
|
||||
dumpPath(path, sink, fsm, filter);
|
||||
});
|
||||
return addToStoreFromDump(*source, name, fsm, method, hashAlgo, references, repair);
|
||||
LengthSource lengthSource(*source);
|
||||
auto storePath = addToStoreFromDump(lengthSource, name, fsm, method, hashAlgo, references, repair);
|
||||
if (lengthSource.total >= settings.largePathWarningThreshold)
|
||||
warn("copied large path '%s' to the store (%d bytes)", path, lengthSource.total);
|
||||
return storePath;
|
||||
}
|
||||
|
||||
void Store::addMultipleToStore(
|
||||
|
|
|
@ -529,9 +529,9 @@ bool Worker::pathContentsGood(const StorePath & path)
|
|||
if (!pathExists(store.printStorePath(path)))
|
||||
res = false;
|
||||
else {
|
||||
Hash current = hashPath(
|
||||
auto current = hashPath(
|
||||
{store.getFSAccessor(), CanonPath(store.printStorePath(path))},
|
||||
FileIngestionMethod::Recursive, info->narHash.algo);
|
||||
FileIngestionMethod::Recursive, info->narHash.algo).first;
|
||||
Hash nullHash(HashAlgorithm::SHA256);
|
||||
res = info->narHash == nullHash || info->narHash == current;
|
||||
}
|
||||
|
|
|
@ -112,17 +112,19 @@ HashResult hashPath(
|
|||
}
|
||||
|
||||
|
||||
Hash hashPath(
|
||||
std::pair<Hash, std::optional<uint64_t>> hashPath(
|
||||
const SourcePath & path,
|
||||
FileIngestionMethod method, HashAlgorithm ht,
|
||||
PathFilter & filter)
|
||||
{
|
||||
switch (method) {
|
||||
case FileIngestionMethod::Flat:
|
||||
case FileIngestionMethod::Recursive:
|
||||
return hashPath(path, (FileSerialisationMethod) method, ht, filter).first;
|
||||
case FileIngestionMethod::Recursive: {
|
||||
auto res = hashPath(path, (FileSerialisationMethod) method, ht, filter);
|
||||
return {res.first, {res.second}};
|
||||
}
|
||||
case FileIngestionMethod::Git:
|
||||
return git::dumpHash(ht, path, filter).hash;
|
||||
return {git::dumpHash(ht, path, filter).hash, std::nullopt};
|
||||
}
|
||||
assert(false);
|
||||
}
|
||||
|
|
|
@ -132,14 +132,15 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method);
|
|||
|
||||
/**
|
||||
* Compute the hash of the given file system object according to the
|
||||
* given method.
|
||||
* given method, and for some ingestion methods, the size of the
|
||||
* serialisation.
|
||||
*
|
||||
* Unlike the other `hashPath`, this works on an arbitrary
|
||||
* `FileIngestionMethod` instead of `FileSerialisationMethod`, but
|
||||
* doesn't return the size as this is this is not a both simple and
|
||||
* may not return the size, since a size is not both simply and
* usefully defined for a Merkle format.
|
||||
*/
|
||||
Hash hashPath(
|
||||
std::pair<Hash, std::optional<uint64_t>> hashPath(
|
||||
const SourcePath & path,
|
||||
FileIngestionMethod method, HashAlgorithm ha,
|
||||
PathFilter & filter = defaultPathFilter);
|
||||
|
|
|
@ -283,6 +283,26 @@ struct LengthSink : Sink
|
|||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* A wrapper source that counts the number of bytes read from it.
|
||||
*/
|
||||
struct LengthSource : Source
|
||||
{
|
||||
Source & next;
|
||||
|
||||
LengthSource(Source & next) : next(next)
|
||||
{ }
|
||||
|
||||
uint64_t total = 0;
|
||||
|
||||
size_t read(char * data, size_t len) override
|
||||
{
|
||||
auto n = next.read(data, len);
|
||||
total += n;
|
||||
return n;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Convert a function into a sink.
|
||||
*/
|
||||
|
|
Loading…
Reference in a new issue