Merge pull request #10655 from edolstra/use-source-path

Use `SourcePath` in more places

Commit 1ad7b5451d: 29 changed files with 112 additions and 119 deletions
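Every hunk below follows the same pattern: interfaces that previously took a `SourceAccessor & accessor, const CanonPath & path` pair now take a single `const SourcePath & path`, and `PosixSourceAccessor::createAtRoot` returns a `SourcePath` directly. As an illustration only (this sketch is not part of the commit), the call-site change looks roughly like this:

```cpp
// Illustrative sketch, not commit code: the before/after shape of a typical call site.
#include "file-content-address.hh"   // hashPath
#include "posix-source-accessor.hh"  // PosixSourceAccessor::createAtRoot

using namespace nix;

Hash hashSomePath(const Path & p)
{
    // Before this commit, callers had to carry an accessor/path pair:
    //   auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(p);
    //   return hashPath(accessor, canonPath,
    //       FileIngestionMethod::Recursive, HashAlgorithm::SHA256);

    // After it, createAtRoot() yields a SourcePath (an accessor ref plus a
    // CanonPath), and hashPath() accepts that value directly:
    return hashPath(
        PosixSourceAccessor::createAtRoot(p),
        FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
}
```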
@@ -256,9 +256,8 @@ SV *
  hashPath(char * algo, int base32, char * path)
  PPCODE:
  try {
- auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path);
  Hash h = hashPath(
- accessor, canonPath,
+ PosixSourceAccessor::createAtRoot(path),
  FileIngestionMethod::Recursive, parseHashAlgo(algo));
  auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
  XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));

@@ -336,10 +335,9 @@ StoreWrapper::addToStore(char * srcPath, int recursive, char * algo)
  PPCODE:
  try {
  auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
- auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(srcPath);
  auto path = THIS->store->addToStore(
  std::string(baseNameOf(srcPath)),
- accessor, canonPath,
+ PosixSourceAccessor::createAtRoot(srcPath),
  method, parseHashAlgo(algo));
  XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0)));
  } catch (Error & e) {

@@ -146,7 +146,7 @@ MixFlakeOptions::MixFlakeOptions()
  .category = category,
  .labels = {"flake-lock-path"},
  .handler = {[&](std::string lockFilePath) {
- lockFlags.referenceLockFilePath = {makeFSSourceAccessor(), CanonPath(absPath(lockFilePath))};
+ lockFlags.referenceLockFilePath = {getFSSourceAccessor(), CanonPath(absPath(lockFilePath))};
  }},
  .completer = completePath
  });

@@ -399,14 +399,14 @@ EvalState::EvalState(
  , emptyBindings(0)
  , rootFS(
  evalSettings.restrictEval || evalSettings.pureEval
- ? ref<SourceAccessor>(AllowListSourceAccessor::create(makeFSSourceAccessor(), {},
+ ? ref<SourceAccessor>(AllowListSourceAccessor::create(getFSSourceAccessor(), {},
  [](const CanonPath & path) -> RestrictedPathError {
  auto modeInformation = evalSettings.pureEval
  ? "in pure evaluation mode (use '--impure' to override)"
  : "in restricted mode";
  throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation);
  }))
- : makeFSSourceAccessor())
+ : getFSSourceAccessor())
  , corepkgsFS(make_ref<MemorySourceAccessor>())
  , internalFS(make_ref<MemorySourceAccessor>())
  , derivationInternal{corepkgsFS->addFile(

@@ -42,9 +42,9 @@ StorePath fetchToStore(
  auto storePath =
  mode == FetchMode::DryRun
  ? store.computeStorePath(
- name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2).first
+ name, path, method, HashAlgorithm::SHA256, {}, filter2).first
  : store.addToStore(
- name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2, repair);
+ name, path, method, HashAlgorithm::SHA256, {}, filter2, repair);

  if (cacheKey && mode == FetchMode::Copy)
  fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);

@@ -212,7 +212,7 @@ struct MercurialInputScheme : InputScheme

  auto storePath = store->addToStore(
  input.getName(),
- *makeFSSourceAccessor(), CanonPath { actualPath },
+ {getFSSourceAccessor(), CanonPath(actualPath)},
  FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {},
  filter);

@@ -318,7 +318,7 @@ struct MercurialInputScheme : InputScheme

  deletePath(tmpDir + "/.hg_archival.txt");

- auto storePath = store->addToStore(name, *makeFSSourceAccessor(), CanonPath { tmpDir });
+ auto storePath = store->addToStore(name, {getFSSourceAccessor(), CanonPath(tmpDir)});

  Attrs infoAttrs({
  {"rev", input.getRev()->gitRev()},

@@ -442,8 +442,7 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,

  StorePath BinaryCacheStore::addToStore(
  std::string_view name,
- SourceAccessor & accessor,
- const CanonPath & path,
+ const SourcePath & path,
  ContentAddressMethod method,
  HashAlgorithm hashAlgo,
  const StorePathSet & references,

@@ -454,10 +453,10 @@ StorePath BinaryCacheStore::addToStore(
  non-recursive+sha256 so we can just use the default
  implementation of this method in terms of addToStoreFromDump. */

- auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter);
+ auto h = hashPath(path, method.getFileIngestionMethod(), hashAlgo, filter);

  auto source = sinkToSource([&](Sink & sink) {
- accessor.dumpPath(path, sink, filter);
+ path.dumpPath(sink, filter);
  });
  return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) {
  ValidPathInfo info {

@@ -133,8 +133,7 @@ public:

  StorePath addToStore(
  std::string_view name,
- SourceAccessor & accessor,
- const CanonPath & srcPath,
+ const SourcePath & path,
  ContentAddressMethod method,
  HashAlgorithm hashAlgo,
  const StorePathSet & references,

@@ -60,8 +60,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor

  StorePath addToStore(
  std::string_view name,
- SourceAccessor & accessor,
- const CanonPath & srcPath,
+ const SourcePath & path,
  ContentAddressMethod method,
  HashAlgorithm hashAlgo,
  const StorePathSet & references,

@@ -167,14 +167,13 @@ StorePath StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const

  std::pair<StorePath, Hash> StoreDirConfig::computeStorePath(
  std::string_view name,
- SourceAccessor & accessor,
- const CanonPath & path,
+ const SourcePath & path,
  ContentAddressMethod method,
  HashAlgorithm hashAlgo,
  const StorePathSet & references,
  PathFilter & filter) const
  {
- auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter);
+ auto h = hashPath(path, method.getFileIngestionMethod(), hashAlgo, filter);
  return {
  makeFixedOutputPathFromCA(
  name,

@@ -192,8 +191,7 @@ std::pair<StorePath, Hash> StoreDirConfig::computeStorePath(

  StorePath Store::addToStore(
  std::string_view name,
- SourceAccessor & accessor,
- const CanonPath & path,
+ const SourcePath & path,
  ContentAddressMethod method,
  HashAlgorithm hashAlgo,
  const StorePathSet & references,

@@ -214,7 +212,7 @@ StorePath Store::addToStore(
  break;
  }
  auto source = sinkToSource([&](Sink & sink) {
- dumpPath(accessor, path, sink, fsm, filter);
+ dumpPath(path, sink, fsm, filter);
  });
  return addToStoreFromDump(*source, name, fsm, method, hashAlgo, references, repair);
  }

@@ -343,8 +341,7 @@ digraph graphname {
  */
  ValidPathInfo Store::addToStoreSlow(
  std::string_view name,
- SourceAccessor & accessor,
- const CanonPath & srcPath,
+ const SourcePath & srcPath,
  ContentAddressMethod method, HashAlgorithm hashAlgo,
  const StorePathSet & references,
  std::optional<Hash> expectedCAHash)

@@ -366,7 +363,7 @@ ValidPathInfo Store::addToStoreSlow(
  srcPath. The fact that we use scratchpadSink as a temporary buffer here
  is an implementation detail. */
  auto fileSource = sinkToSource([&](Sink & scratchpadSink) {
- accessor.dumpPath(srcPath, scratchpadSink);
+ srcPath.dumpPath(scratchpadSink);
  });

  /* tapped provides the same data as fileSource, but we also write all the

@@ -389,13 +386,12 @@ ValidPathInfo Store::addToStoreSlow(
  auto hash = method == FileIngestionMethod::Recursive && hashAlgo == HashAlgorithm::SHA256
  ? narHash
  : method == FileIngestionMethod::Git
- ? git::dumpHash(hashAlgo, accessor, srcPath).hash
+ ? git::dumpHash(hashAlgo, srcPath).hash
  : caHashSink.finish().first;

  if (expectedCAHash && expectedCAHash != hash)
  throw Error("hash mismatch for '%s'", srcPath);

-
  ValidPathInfo info {
  *this,
  name,

@@ -412,7 +408,7 @@ ValidPathInfo Store::addToStoreSlow(

  if (!isValidPath(info.path)) {
  auto source = sinkToSource([&](Sink & scratchpadSink) {
- accessor.dumpPath(srcPath, scratchpadSink);
+ srcPath.dumpPath(scratchpadSink);
  });
  addToStore(info, *source);
  }

@@ -439,8 +439,7 @@ public:
  */
  virtual StorePath addToStore(
  std::string_view name,
- SourceAccessor & accessor,
- const CanonPath & path,
+ const SourcePath & path,
  ContentAddressMethod method = FileIngestionMethod::Recursive,
  HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
  const StorePathSet & references = StorePathSet(),

@@ -454,8 +453,7 @@ public:
  */
  ValidPathInfo addToStoreSlow(
  std::string_view name,
- SourceAccessor & accessor,
- const CanonPath & path,
+ const SourcePath & path,
  ContentAddressMethod method = FileIngestionMethod::Recursive,
  HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
  const StorePathSet & references = StorePathSet(),

@@ -13,6 +13,8 @@

  namespace nix {

+ struct SourcePath;
+
  MakeError(BadStorePath, Error);

  struct StoreDirConfig : public Config

@@ -94,8 +96,7 @@ struct StoreDirConfig : public Config
  */
  std::pair<StorePath, Hash> computeStorePath(
  std::string_view name,
- SourceAccessor & accessor,
- const CanonPath & path,
+ const SourcePath & path,
  ContentAddressMethod method = FileIngestionMethod::Recursive,
  HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
  const StorePathSet & references = {},

@@ -1306,8 +1306,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In

  StorePath addToStore(
  std::string_view name,
- SourceAccessor & accessor,
- const CanonPath & srcPath,
+ const SourcePath & srcPath,
  ContentAddressMethod method,
  HashAlgorithm hashAlgo,
  const StorePathSet & references,

@@ -2485,7 +2484,6 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
  /* FIXME optimize and deduplicate with addToStore */
  std::string oldHashPart { scratchPath->hashPart() };
  auto got = [&]{
- PosixSourceAccessor accessor;
  auto fim = outputHash.method.getFileIngestionMethod();
  switch (fim) {
  case FileIngestionMethod::Flat:

@@ -2494,15 +2492,15 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
  HashModuloSink caSink { outputHash.hashAlgo, oldHashPart };
  auto fim = outputHash.method.getFileIngestionMethod();
  dumpPath(
- accessor, CanonPath { actualPath },
+ {getFSSourceAccessor(), CanonPath(actualPath)},
  caSink,
  (FileSerialisationMethod) fim);
  return caSink.finish().first;
  }
  case FileIngestionMethod::Git: {
  return git::dumpHash(
- outputHash.hashAlgo, accessor,
- CanonPath { tmpDir + "/tmp" }).hash;
+ outputHash.hashAlgo,
+ {getFSSourceAccessor(), CanonPath(tmpDir + "/tmp")}).hash;
  }
  }
  assert(false);

@@ -2529,9 +2527,8 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
  }

  {
- PosixSourceAccessor accessor;
  HashResult narHashAndSize = hashPath(
- accessor, CanonPath { actualPath },
+ {getFSSourceAccessor(), CanonPath(actualPath)},
  FileSerialisationMethod::Recursive, HashAlgorithm::SHA256);
  newInfo0.narHash = narHashAndSize.first;
  newInfo0.narSize = narHashAndSize.second;

@@ -2553,9 +2550,8 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
  std::string { scratchPath->hashPart() },
  std::string { requiredFinalPath.hashPart() });
  rewriteOutput(outputRewrites);
- PosixSourceAccessor accessor;
  HashResult narHashAndSize = hashPath(
- accessor, CanonPath { actualPath },
+ {getFSSourceAccessor(), CanonPath(actualPath)},
  FileSerialisationMethod::Recursive, HashAlgorithm::SHA256);
  ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
  newInfo0.narSize = narHashAndSize.second;

@@ -530,7 +530,7 @@ bool Worker::pathContentsGood(const StorePath & path)
  res = false;
  else {
  Hash current = hashPath(
- *store.getFSAccessor(), CanonPath { store.printStorePath(path) },
+ {store.getFSAccessor(), CanonPath(store.printStorePath(path))},
  FileIngestionMethod::Recursive, info->narHash.algo);
  Hash nullHash(HashAlgorithm::SHA256);
  res = info->narHash == nullHash || info->narHash == current;

@@ -1132,12 +1132,12 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
  specified.hash.algo,
  std::string { info.path.hashPart() },
  };
- dumpPath(*accessor, path, caSink, (FileSerialisationMethod) fim);
+ dumpPath({accessor, path}, caSink, (FileSerialisationMethod) fim);
  h = caSink.finish().first;
  break;
  }
  case FileIngestionMethod::Git:
- h = git::dumpHash(specified.hash.algo, *accessor, path).hash;
+ h = git::dumpHash(specified.hash.algo, {accessor, path}).hash;
  break;
  }
  ContentAddress {

@@ -1247,14 +1247,12 @@ StorePath LocalStore::addToStoreFromDump(

  auto [dumpHash, size] = hashSink->finish();

- PosixSourceAccessor accessor;
-
  auto desc = ContentAddressWithReferences::fromParts(
  hashMethod,
  methodsMatch
  ? dumpHash
  : hashPath(
- accessor, CanonPath { tempPath },
+ {getFSSourceAccessor(), CanonPath(tempPath)},
  hashMethod.getFileIngestionMethod(), hashAlgo),
  {
  .others = references,

@@ -1394,7 +1392,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
  Path linkPath = linksDir + "/" + link.name;
  PosixSourceAccessor accessor;
  std::string hash = hashPath(
- accessor, CanonPath { linkPath },
+ {getFSSourceAccessor(), CanonPath(linkPath)},
  FileIngestionMethod::Recursive, HashAlgorithm::SHA256).to_string(HashFormat::Nix32, false);
  if (hash != link.name) {
  printError("link '%s' was modified! expected hash '%s', got '%s'",

@@ -148,9 +148,8 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
  contents of the symlink (i.e. the result of readlink()), not
  the contents of the target (which may not even exist). */
  Hash hash = ({
- PosixSourceAccessor accessor;
  hashPath(
- accessor, CanonPath { path },
+ {make_ref<PosixSourceAccessor>(), CanonPath(path)},
  FileSerialisationMethod::Recursive, HashAlgorithm::SHA256).first;
  });
  debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true));

@@ -163,9 +162,8 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
  auto stLink = lstat(linkPath);
  if (st.st_size != stLink.st_size
  || (repair && hash != ({
- PosixSourceAccessor accessor;
  hashPath(
- accessor, CanonPath { linkPath },
+ {make_ref<PosixSourceAccessor>(), CanonPath(linkPath)},
  FileSerialisationMethod::Recursive, HashAlgorithm::SHA256).first;
  })))
  {

@@ -8,6 +8,7 @@
  #include "archive.hh"
  #include "config.hh"
  #include "posix-source-accessor.hh"
+ #include "source-path.hh"
  #include "file-system.hh"
  #include "signals.hh"

@@ -110,9 +111,9 @@ void SourceAccessor::dumpPath(

  time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter)
  {
- auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path);
- accessor.dumpPath(canonPath, sink, filter);
- return accessor.mtime;
+ auto path2 = PosixSourceAccessor::createAtRoot(path);
+ path2.dumpPath(sink, filter);
+ return path2.accessor.dynamic_pointer_cast<PosixSourceAccessor>()->mtime;
  }

  void dumpPath(const Path & path, Sink & sink, PathFilter & filter)

@@ -1,6 +1,7 @@
  #include "file-content-address.hh"
  #include "archive.hh"
  #include "git.hh"
+ #include "source-path.hh"

  namespace nix {

@@ -68,17 +69,17 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method)


  void dumpPath(
- SourceAccessor & accessor, const CanonPath & path,
+ const SourcePath & path,
  Sink & sink,
  FileSerialisationMethod method,
  PathFilter & filter)
  {
  switch (method) {
  case FileSerialisationMethod::Flat:
- accessor.readFile(path, sink);
+ path.readFile(sink);
  break;
  case FileSerialisationMethod::Recursive:
- accessor.dumpPath(path, sink, filter);
+ path.dumpPath(sink, filter);
  break;
  }
  }

@@ -101,27 +102,27 @@ void restorePath(


  HashResult hashPath(
- SourceAccessor & accessor, const CanonPath & path,
+ const SourcePath & path,
  FileSerialisationMethod method, HashAlgorithm ha,
  PathFilter & filter)
  {
  HashSink sink { ha };
- dumpPath(accessor, path, sink, method, filter);
+ dumpPath(path, sink, method, filter);
  return sink.finish();
  }


  Hash hashPath(
- SourceAccessor & accessor, const CanonPath & path,
+ const SourcePath & path,
  FileIngestionMethod method, HashAlgorithm ht,
  PathFilter & filter)
  {
  switch (method) {
  case FileIngestionMethod::Flat:
  case FileIngestionMethod::Recursive:
- return hashPath(accessor, path, (FileSerialisationMethod) method, ht, filter).first;
+ return hashPath(path, (FileSerialisationMethod) method, ht, filter).first;
  case FileIngestionMethod::Git:
- return git::dumpHash(ht, accessor, path, filter).hash;
+ return git::dumpHash(ht, path, filter).hash;
  }
  assert(false);
  }

@@ -7,6 +7,8 @@

  namespace nix {

+ struct SourcePath;
+
  /**
  * An enumeration of the ways we can serialize file system
  * objects.

@@ -45,7 +47,7 @@ std::string_view renderFileSerialisationMethod(FileSerialisationMethod method);
  * Dump a serialization of the given file system object.
  */
  void dumpPath(
- SourceAccessor & accessor, const CanonPath & path,
+ const SourcePath & path,
  Sink & sink,
  FileSerialisationMethod method,
  PathFilter & filter = defaultPathFilter);

@@ -72,7 +74,7 @@ void restorePath(
  * ```
  */
  HashResult hashPath(
- SourceAccessor & accessor, const CanonPath & path,
+ const SourcePath & path,
  FileSerialisationMethod method, HashAlgorithm ha,
  PathFilter & filter = defaultPathFilter);


@@ -138,7 +140,7 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method);
  * useful defined for a merkle format.
  */
  Hash hashPath(
- SourceAccessor & accessor, const CanonPath & path,
+ const SourcePath & path,
  FileIngestionMethod method, HashAlgorithm ha,
  PathFilter & filter = defaultPathFilter);

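The header above now declares all three helpers in terms of `SourcePath`. A hedged sketch of how they compose (the function and variable names here are illustrative, not from the commit):

```cpp
// Sketch based on the declarations above; not part of the commit.
#include "file-content-address.hh"
#include "posix-source-accessor.hh"

using namespace nix;

void hashExamples(Sink & sink, const Path & p)
{
    SourcePath src = PosixSourceAccessor::createAtRoot(p);

    // Serialise the file system object into a sink (NAR-style dump here):
    dumpPath(src, sink, FileSerialisationMethod::Recursive);

    // Hash by serialisation method, yielding the hash plus the serialised size:
    HashResult narHash = hashPath(src, FileSerialisationMethod::Recursive, HashAlgorithm::SHA256);

    // Hash by ingestion method, which additionally covers git tree hashing:
    Hash gitHash = hashPath(src, FileIngestionMethod::Git, HashAlgorithm::SHA1);

    (void) narHash; (void) gitHash;
}
```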
@@ -8,7 +8,6 @@
  #include "signals.hh"
  #include "config.hh"
  #include "hash.hh"
- #include "posix-source-accessor.hh"

  #include "git.hh"
  #include "serialise.hh"

@@ -269,18 +268,18 @@ void dumpTree(const Tree & entries, Sink & sink,


  Mode dump(
- SourceAccessor & accessor, const CanonPath & path,
+ const SourcePath & path,
  Sink & sink,
  std::function<DumpHook> hook,
  PathFilter & filter,
  const ExperimentalFeatureSettings & xpSettings)
  {
- auto st = accessor.lstat(path);
+ auto st = path.lstat();

  switch (st.type) {
  case SourceAccessor::tRegular:
  {
- accessor.readFile(path, sink, [&](uint64_t size) {
+ path.readFile(sink, [&](uint64_t size) {
  dumpBlobPrefix(size, sink, xpSettings);
  });
  return st.isExecutable

@@ -291,9 +290,9 @@ Mode dump(
  case SourceAccessor::tDirectory:
  {
  Tree entries;
- for (auto & [name, _] : accessor.readDirectory(path)) {
+ for (auto & [name, _] : path.readDirectory()) {
  auto child = path / name;
- if (!filter(child.abs())) continue;
+ if (!filter(child.path.abs())) continue;

  auto entry = hook(child);


@@ -309,7 +308,7 @@ Mode dump(

  case SourceAccessor::tSymlink:
  {
- auto target = accessor.readLink(path);
+ auto target = path.readLink();
  dumpBlobPrefix(target.size(), sink, xpSettings);
  sink(target);
  return Mode::Symlink;

@@ -323,13 +322,14 @@ Mode dump(


  TreeEntry dumpHash(
- HashAlgorithm ha,
- SourceAccessor & accessor, const CanonPath & path, PathFilter & filter)
+ HashAlgorithm ha,
+ const SourcePath & path,
+ PathFilter & filter)
  {
  std::function<DumpHook> hook;
- hook = [&](const CanonPath & path) -> TreeEntry {
+ hook = [&](const SourcePath & path) -> TreeEntry {
  auto hashSink = HashSink(ha);
- auto mode = dump(accessor, path, hashSink, hook, filter);
+ auto mode = dump(path, hashSink, hook, filter);
  auto hash = hashSink.finish().first;
  return {
  .mode = mode,

@@ -8,7 +8,7 @@
  #include "types.hh"
  #include "serialise.hh"
  #include "hash.hh"
- #include "source-accessor.hh"
+ #include "source-path.hh"
  #include "fs-sink.hh"

  namespace nix::git {

@@ -125,7 +125,7 @@ std::optional<Mode> convertMode(SourceAccessor::Type type);
  * Given a `Hash`, return a `SourceAccessor` and `CanonPath` pointing to
  * the file system object with that path.
  */
- using RestoreHook = std::pair<SourceAccessor *, CanonPath>(Hash);
+ using RestoreHook = SourcePath(Hash);

  /**
  * Wrapper around `parse` and `RestoreSink`

@@ -157,10 +157,10 @@ void dumpTree(
  * Note that if the child is a directory, its child in must also be so
  * processed in order to compute this information.
  */
- using DumpHook = TreeEntry(const CanonPath & path);
+ using DumpHook = TreeEntry(const SourcePath & path);

  Mode dump(
- SourceAccessor & accessor, const CanonPath & path,
+ const SourcePath & path,
  Sink & sink,
  std::function<DumpHook> hook,
  PathFilter & filter = defaultPathFilter,

@@ -172,9 +172,9 @@ Mode dump(
  * A smaller wrapper around `dump`.
  */
  TreeEntry dumpHash(
- HashAlgorithm ha,
- SourceAccessor & accessor, const CanonPath & path,
- PathFilter & filter = defaultPathFilter);
+ HashAlgorithm ha,
+ const SourcePath & path,
+ PathFilter & filter = defaultPathFilter);

  /**
  * A line from the output of `git ls-remote --symref`.

@@ -1,4 +1,5 @@
  #include "posix-source-accessor.hh"
+ #include "source-path.hh"
  #include "signals.hh"
  #include "sync.hh"

@@ -17,11 +18,11 @@ PosixSourceAccessor::PosixSourceAccessor()
  : PosixSourceAccessor(std::filesystem::path {})
  { }

- std::pair<PosixSourceAccessor, CanonPath> PosixSourceAccessor::createAtRoot(const std::filesystem::path & path)
+ SourcePath PosixSourceAccessor::createAtRoot(const std::filesystem::path & path)
  {
  std::filesystem::path path2 = absPath(path.string());
  return {
- PosixSourceAccessor { path2.root_path() },
+ make_ref<PosixSourceAccessor>(path2.root_path()),
  CanonPath { path2.relative_path().string() },
  };
  }

@@ -166,7 +167,7 @@ void PosixSourceAccessor::assertNoSymlinks(CanonPath path)
  }
  }

- ref<SourceAccessor> makeFSSourceAccessor()
+ ref<SourceAccessor> getFSSourceAccessor()
  {
  static auto rootFS = make_ref<PosixSourceAccessor>();
  return rootFS;

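With `createAtRoot` now returning a `SourcePath` whose two public members are the accessor ref and the `CanonPath`, the call sites later in this diff use it in two ways, shown here as a hedged illustration (not commit code):

```cpp
// Illustration of the two calling styles visible in later hunks; not commit code.
#include "posix-source-accessor.hh"
#include "source-path.hh"

using namespace nix;

void callerStyles(const Path & p)
{
    // Style 1: keep the SourcePath whole and pass it along as one value.
    auto src = PosixSourceAccessor::createAtRoot(p);

    // Style 2: destructure it. SourcePath's members (accessor, path) still
    // support structured bindings, which the nix-store hunks below rely on,
    // and a SourcePath can be rebuilt from the pieces with braced init.
    auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(p);
    SourcePath again {accessor, canonPath};

    (void) src; (void) again;
}
```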
@@ -4,6 +4,8 @@

  namespace nix {

+ struct SourcePath;
+
  /**
  * A source accessor that uses the Unix filesystem.
  */

@@ -53,7 +55,7 @@ struct PosixSourceAccessor : virtual SourceAccessor
  * and
  * [`std::filesystem::path::relative_path`](https://en.cppreference.com/w/cpp/filesystem/path/relative_path).
  */
- static std::pair<PosixSourceAccessor, CanonPath> createAtRoot(const std::filesystem::path & path);
+ static SourcePath createAtRoot(const std::filesystem::path & path);

  private:

@@ -197,10 +197,13 @@ MakeError(RestrictedPathError, Error);
  /**
  * Return an accessor for the root filesystem.
  */
- ref<SourceAccessor> makeFSSourceAccessor();
+ ref<SourceAccessor> getFSSourceAccessor();

  /**
- * Return an accessor for the filesystem rooted at `root`.
+ * Construct an accessor for the filesystem rooted at `root`. Note
+ * that it is not possible to escape `root` by appending `..` path
+ * elements, and that absolute symlinks are resolved relative to
+ * `root`.
  */
  ref<SourceAccessor> makeFSSourceAccessor(std::filesystem::path root);

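The renamed `getFSSourceAccessor()` hands out a shared root-filesystem accessor (per the `static auto rootFS = make_ref<PosixSourceAccessor>()` definition earlier in this diff), so call sites simply wrap it together with a `CanonPath` to form a `SourcePath` on the spot. A small hedged example of that pattern:

```cpp
// Example of the call-site pattern used throughout this diff; not commit code.
#include "source-accessor.hh"
#include "source-path.hh"
#include "file-content-address.hh"

using namespace nix;

Hash hashViaRootAccessor(const Path & absolutePath)
{
    // {getFSSourceAccessor(), CanonPath(...)} aggregates into a SourcePath.
    return hashPath(
        {getFSSourceAccessor(), CanonPath(absolutePath)},
        FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
}
```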
@@ -41,6 +41,11 @@ struct SourcePath
  */
  std::string readFile() const;

+ void readFile(
+ Sink & sink,
+ std::function<void(uint64_t)> sizeCallback = [](uint64_t size){}) const
+ { return accessor->readFile(path, sink, sizeCallback); }
+
  /**
  * Return whether this `SourcePath` denotes a file (of any type)
  * that exists

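The new `readFile` overload added to `SourcePath` above streams a file into a `Sink`, forwarding the call to the underlying accessor together with an optional size callback. A minimal hedged usage sketch (the callback behaviour mirrors how the git `dump` hunk earlier uses it to emit a blob length prefix before the data):

```cpp
// Minimal usage sketch for the new SourcePath::readFile overload; not commit code.
#include "source-path.hh"
#include "posix-source-accessor.hh"
#include "serialise.hh"

using namespace nix;

void streamFile(Sink & sink, const Path & p)
{
    SourcePath file = PosixSourceAccessor::createAtRoot(p);
    file.readFile(sink, [&](uint64_t size) {
        // Called with the file size before the contents are written,
        // e.g. to emit a length prefix as git::dump does above.
    });
}
```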
@@ -184,7 +184,7 @@ static void opAdd(Strings opFlags, Strings opArgs)
  for (auto & i : opArgs) {
  auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(i);
  cout << fmt("%s\n", store->printStorePath(store->addToStore(
- std::string(baseNameOf(i)), accessor, canonPath)));
+ std::string(baseNameOf(i)), {accessor, canonPath})));
  }
  }

@@ -209,8 +209,7 @@ static void opAddFixed(Strings opFlags, Strings opArgs)
  auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(i);
  std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow(
  baseNameOf(i),
- accessor,
- canonPath,
+ {accessor, canonPath},
  method,
  hashAlgo).path));
  }

@@ -562,8 +561,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
  #endif
  if (!hashGiven) {
  HashResult hash = hashPath(
- *store->getFSAccessor(false), CanonPath { store->printStorePath(info->path) },
-
+ {store->getFSAccessor(false), CanonPath { store->printStorePath(info->path) }},
  FileSerialisationMethod::Recursive, HashAlgorithm::SHA256);
  info->narHash = hash.first;
  info->narSize = hash.second;

@@ -41,9 +41,9 @@ struct CmdAddToStore : MixDryRun, StoreCommand

  auto storePath = dryRun
  ? store->computeStorePath(
- *namePart, accessor, path2, caMethod, hashAlgo, {}).first
+ *namePart, {accessor, path2}, caMethod, hashAlgo, {}).first
  : store->addToStoreSlow(
- *namePart, accessor, path2, caMethod, hashAlgo, {}).path;
+ *namePart, {accessor, path2}, caMethod, hashAlgo, {}).path;

  logger->cout("%s", store->printStorePath(storePath));
  }

@@ -87,30 +87,29 @@ struct CmdHashBase : Command
  return std::make_unique<HashSink>(hashAlgo);
  };

- auto [accessor_, canonPath] = PosixSourceAccessor::createAtRoot(path);
- auto & accessor = accessor_;
+ auto path2 = PosixSourceAccessor::createAtRoot(path);
  Hash h { HashAlgorithm::SHA256 }; // throwaway def to appease C++
  switch (mode) {
  case FileIngestionMethod::Flat:
  case FileIngestionMethod::Recursive:
  {
  auto hashSink = makeSink();
- dumpPath(accessor, canonPath, *hashSink, (FileSerialisationMethod) mode);
+ dumpPath(path2, *hashSink, (FileSerialisationMethod) mode);
  h = hashSink->finish().first;
  break;
  }
  case FileIngestionMethod::Git: {
  std::function<git::DumpHook> hook;
- hook = [&](const CanonPath & path) -> git::TreeEntry {
+ hook = [&](const SourcePath & path) -> git::TreeEntry {
  auto hashSink = makeSink();
- auto mode = dump(accessor, path, *hashSink, hook);
+ auto mode = dump(path, *hashSink, hook);
  auto hash = hashSink->finish().first;
  return {
  .mode = mode,
  .hash = hash,
  };
  };
- h = hook(canonPath).hash;
+ h = hook(path2).hash;
  break;
  }
  }

@@ -125,9 +125,8 @@ std::tuple<StorePath, Hash> prefetchFile(
  Activity act(*logger, lvlChatty, actUnknown,
  fmt("adding '%s' to the store", url));

- auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(tmpFile);
  auto info = store->addToStoreSlow(
- *name, accessor, canonPath,
+ *name, PosixSourceAccessor::createAtRoot(tmpFile),
  ingestionMethod, hashAlgo, {}, expectedHash);
  storePath = info.path;
  assert(info.ca);

@@ -154,8 +154,8 @@ TEST_F(GitTest, tree_write) {
  TEST_F(GitTest, both_roundrip) {
  using File = MemorySourceAccessor::File;

- MemorySourceAccessor files;
- files.root = File::Directory {
+ auto files = make_ref<MemorySourceAccessor>();
+ files->root = File::Directory {
  .contents {
  {
  "foo",

@@ -189,12 +189,12 @@ TEST_F(GitTest, both_roundrip) {
  std::map<Hash, std::string> cas;

  std::function<DumpHook> dumpHook;
- dumpHook = [&](const CanonPath & path) {
+ dumpHook = [&](const SourcePath & path) {
  StringSink s;
  HashSink hashSink { HashAlgorithm::SHA1 };
  TeeSink s2 { s, hashSink };
  auto mode = dump(
- files, path, s2, dumpHook,
+ path, s2, dumpHook,
  defaultPathFilter, mockXpSettings);
  auto hash = hashSink.finish().first;
  cas.insert_or_assign(hash, std::move(s.s));

@@ -204,11 +204,11 @@ TEST_F(GitTest, both_roundrip) {
  };
  };

- auto root = dumpHook(CanonPath::root);
+ auto root = dumpHook({files});

- MemorySourceAccessor files2;
+ auto files2 = make_ref<MemorySourceAccessor>();

- MemorySink sinkFiles2 { files2 };
+ MemorySink sinkFiles2 { *files2 };

  std::function<void(const Path, const Hash &, BlobMode)> mkSinkHook;
  mkSinkHook = [&](auto prefix, auto & hash, auto blobMode) {

@@ -229,7 +229,7 @@ TEST_F(GitTest, both_roundrip) {

  mkSinkHook("", root.hash, BlobMode::Regular);

- ASSERT_EQ(files, files2);
+ ASSERT_EQ(*files, *files2);
  }

  TEST(GitLsRemote, parseSymrefLineWithReference) {
