Merge pull request #10465 from edolstra/remove-locked: Fetcher cache cleanups

Commit da3381d51f
9 changed files with 200 additions and 214 deletions
@@ -11,12 +11,11 @@ namespace nix::fetchers {
 
 static const char * schema = R"sql(
 
 create table if not exists Cache (
-    input text not null,
-    info text not null,
-    path text not null,
-    immutable integer not null,
+    domain text not null,
+    key text not null,
+    value text not null,
     timestamp integer not null,
-    primary key (input)
+    primary key (domain, key)
 );
 )sql";
 
@@ -28,7 +27,7 @@ struct CacheImpl : Cache
     struct State
     {
         SQLite db;
-        SQLiteStmt add, lookup;
+        SQLiteStmt upsert, lookup;
     };
 
     Sync<State> _state;
@@ -37,137 +36,130 @@ struct CacheImpl : Cache
     {
         auto state(_state.lock());
 
-        auto dbPath = getCacheDir() + "/nix/fetcher-cache-v1.sqlite";
+        auto dbPath = getCacheDir() + "/nix/fetcher-cache-v2.sqlite";
         createDirs(dirOf(dbPath));
 
         state->db = SQLite(dbPath);
         state->db.isCache();
         state->db.exec(schema);
 
-        state->add.create(state->db,
-            "insert or replace into Cache(input, info, path, immutable, timestamp) values (?, ?, ?, ?, ?)");
+        state->upsert.create(state->db,
+            "insert or replace into Cache(domain, key, value, timestamp) values (?, ?, ?, ?)");
 
         state->lookup.create(state->db,
-            "select info, path, immutable, timestamp from Cache where input = ?");
+            "select value, timestamp from Cache where domain = ? and key = ?");
     }
 
     void upsert(
-        const Attrs & inAttrs,
-        const Attrs & infoAttrs) override
+        const Key & key,
+        const Attrs & value) override
     {
-        _state.lock()->add.use()
-            (attrsToJSON(inAttrs).dump())
-            (attrsToJSON(infoAttrs).dump())
-            ("") // no path
-            (false)
+        _state.lock()->upsert.use()
+            (key.first)
+            (attrsToJSON(key.second).dump())
+            (attrsToJSON(value).dump())
             (time(0)).exec();
     }
 
-    std::optional<Attrs> lookup(const Attrs & inAttrs) override
+    std::optional<Attrs> lookup(
+        const Key & key) override
     {
-        if (auto res = lookupExpired(inAttrs))
-            return std::move(res->infoAttrs);
+        if (auto res = lookupExpired(key))
+            return std::move(res->value);
         return {};
     }
 
-    std::optional<Attrs> lookupWithTTL(const Attrs & inAttrs) override
+    std::optional<Attrs> lookupWithTTL(
+        const Key & key) override
     {
-        if (auto res = lookupExpired(inAttrs)) {
+        if (auto res = lookupExpired(key)) {
             if (!res->expired)
-                return std::move(res->infoAttrs);
-            debug("ignoring expired cache entry '%s'",
-                attrsToJSON(inAttrs).dump());
-        }
-        return {};
-    }
-
-    std::optional<Result2> lookupExpired(const Attrs & inAttrs) override
-    {
-        auto state(_state.lock());
-
-        auto inAttrsJSON = attrsToJSON(inAttrs).dump();
-
-        auto stmt(state->lookup.use()(inAttrsJSON));
-        if (!stmt.next()) {
-            debug("did not find cache entry for '%s'", inAttrsJSON);
-            return {};
-        }
-
-        auto infoJSON = stmt.getStr(0);
-        auto locked = stmt.getInt(2) != 0;
-        auto timestamp = stmt.getInt(3);
-
-        debug("using cache entry '%s' -> '%s'", inAttrsJSON, infoJSON);
-
-        return Result2 {
-            .expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),
-            .infoAttrs = jsonToAttrs(nlohmann::json::parse(infoJSON)),
-        };
-    }
-
-    void add(
-        Store & store,
-        const Attrs & inAttrs,
-        const Attrs & infoAttrs,
-        const StorePath & storePath,
-        bool locked) override
-    {
-        _state.lock()->add.use()
-            (attrsToJSON(inAttrs).dump())
-            (attrsToJSON(infoAttrs).dump())
-            (store.printStorePath(storePath))
-            (locked)
-            (time(0)).exec();
-    }
-
-    std::optional<std::pair<Attrs, StorePath>> lookup(
-        Store & store,
-        const Attrs & inAttrs) override
-    {
-        if (auto res = lookupExpired(store, inAttrs)) {
-            if (!res->expired)
-                return std::make_pair(std::move(res->infoAttrs), std::move(res->storePath));
-            debug("ignoring expired cache entry '%s'",
-                attrsToJSON(inAttrs).dump());
+                return std::move(res->value);
+            debug("ignoring expired cache entry '%s:%s'",
+                key.first, attrsToJSON(key.second).dump());
         }
         return {};
     }
 
     std::optional<Result> lookupExpired(
-        Store & store,
-        const Attrs & inAttrs) override
+        const Key & key) override
     {
         auto state(_state.lock());
 
-        auto inAttrsJSON = attrsToJSON(inAttrs).dump();
+        auto keyJSON = attrsToJSON(key.second).dump();
 
-        auto stmt(state->lookup.use()(inAttrsJSON));
+        auto stmt(state->lookup.use()(key.first)(keyJSON));
         if (!stmt.next()) {
-            debug("did not find cache entry for '%s'", inAttrsJSON);
+            debug("did not find cache entry for '%s:%s'", key.first, keyJSON);
             return {};
         }
 
-        auto infoJSON = stmt.getStr(0);
-        auto storePath = store.parseStorePath(stmt.getStr(1));
-        auto locked = stmt.getInt(2) != 0;
-        auto timestamp = stmt.getInt(3);
+        auto valueJSON = stmt.getStr(0);
+        auto timestamp = stmt.getInt(1);
 
-        store.addTempRoot(storePath);
-        if (!store.isValidPath(storePath)) {
-            // FIXME: we could try to substitute 'storePath'.
-            debug("ignoring disappeared cache entry '%s'", inAttrsJSON);
-            return {};
-        }
-
-        debug("using cache entry '%s' -> '%s', '%s'",
-            inAttrsJSON, infoJSON, store.printStorePath(storePath));
+        debug("using cache entry '%s:%s' -> '%s'", key.first, keyJSON, valueJSON);
 
         return Result {
-            .expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),
-            .infoAttrs = jsonToAttrs(nlohmann::json::parse(infoJSON)),
-            .storePath = std::move(storePath)
+            .expired = settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0),
+            .value = jsonToAttrs(nlohmann::json::parse(valueJSON)),
         };
     }
 
+    void upsert(
+        Key key,
+        Store & store,
+        Attrs value,
+        const StorePath & storePath)
+    {
+        /* Add the store prefix to the cache key to handle multiple
+           store prefixes. */
+        key.second.insert_or_assign("store", store.storeDir);
+
+        value.insert_or_assign("storePath", (std::string) storePath.to_string());
+
+        upsert(key, value);
+    }
+
+    std::optional<ResultWithStorePath> lookupStorePath(
+        Key key,
+        Store & store) override
+    {
+        key.second.insert_or_assign("store", store.storeDir);
+
+        auto res = lookupExpired(key);
+        if (!res) return std::nullopt;
+
+        auto storePathS = getStrAttr(res->value, "storePath");
+        res->value.erase("storePath");
+
+        ResultWithStorePath res2(*res, StorePath(storePathS));
+
+        store.addTempRoot(res2.storePath);
+        if (!store.isValidPath(res2.storePath)) {
+            // FIXME: we could try to substitute 'storePath'.
+            debug("ignoring disappeared cache entry '%s:%s' -> '%s'",
+                key.first,
+                attrsToJSON(key.second).dump(),
+                store.printStorePath(res2.storePath));
+            return std::nullopt;
+        }
+
+        debug("using cache entry '%s:%s' -> '%s', '%s'",
+            key.first,
+            attrsToJSON(key.second).dump(),
+            attrsToJSON(res2.value).dump(),
+            store.printStorePath(res2.storePath));
+
+        return res2;
+    }
+
+    std::optional<ResultWithStorePath> lookupStorePathWithTTL(
+        Key key,
+        Store & store) override
+    {
+        auto res = lookupStorePath(std::move(key), store);
+        return res && !res->expired ? res : std::nullopt;
+    }
 };
 
 ref<Cache> getCache()
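Since the hunk above changes what a cache entry is keyed on, it may help to see the new API from a caller's point of view. The following is a minimal sketch, assuming the `CacheImpl` shown above and reusing the `gitRevCount` domain that appears later in this diff; the helper function, the include paths and the example values are illustrative assumptions, not code from this commit.

    #include "cache.hh"   // nix::fetchers::Cache, getCache() (include path assumed)
    #include "attrs.hh"   // nix::fetchers::Attrs, getIntAttr() (include path assumed)

    #include <functional>
    #include <string>

    using namespace nix::fetchers;

    // Hypothetical helper: memoize an expensive rev -> revcount computation.
    uint64_t cachedRevCount(const std::string & rev, std::function<uint64_t()> compute)
    {
        // The domain string replaces the old "_what" attribute; the Attrs part is
        // serialized to JSON and forms the rest of the composite primary key.
        Cache::Key key{"gitRevCount", {{"rev", rev}}};

        // lookup() ignores the TTL ("infinite TTL" in the interface below).
        if (auto attrs = getCache()->lookup(key))
            return getIntAttr(*attrs, "revCount");

        auto revCount = compute();

        // upsert() overwrites any previous row for this (domain, key) pair.
        getCache()->upsert(key, Attrs{{"revCount", revCount}});

        return revCount;
    }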
@@ -15,61 +15,80 @@ struct Cache
     virtual ~Cache() { }
 
     /**
-     * Add a value to the cache. The cache is an arbitrary mapping of
-     * Attrs to Attrs.
+     * A domain is a partition of the key/value cache for a particular
+     * purpose, e.g. git revision to revcount.
+     */
+    using Domain = std::string_view;
+
+    /**
+     * A cache key is a domain and an arbitrary set of attributes.
+     */
+    using Key = std::pair<Domain, Attrs>;
+
+    /**
+     * Add a key/value pair to the cache.
      */
     virtual void upsert(
-        const Attrs & inAttrs,
-        const Attrs & infoAttrs) = 0;
+        const Key & key,
+        const Attrs & value) = 0;
 
     /**
      * Look up a key with infinite TTL.
      */
     virtual std::optional<Attrs> lookup(
-        const Attrs & inAttrs) = 0;
+        const Key & key) = 0;
 
     /**
      * Look up a key. Return nothing if its TTL has exceeded
      * `settings.tarballTTL`.
      */
     virtual std::optional<Attrs> lookupWithTTL(
-        const Attrs & inAttrs) = 0;
+        const Key & key) = 0;
 
-    struct Result2
+    struct Result
     {
         bool expired = false;
-        Attrs infoAttrs;
+        Attrs value;
     };
 
     /**
      * Look up a key. Return a bool denoting whether its TTL has
      * exceeded `settings.tarballTTL`.
      */
-    virtual std::optional<Result2> lookupExpired(
-        const Attrs & inAttrs) = 0;
+    virtual std::optional<Result> lookupExpired(
+        const Key & key) = 0;
 
-    /* Old cache for things that have a store path. */
-    virtual void add(
+    /**
+     * Insert a cache entry that has a store path associated with
+     * it. Such cache entries are always considered stale if the
+     * associated store path is invalid.
+     */
+    virtual void upsert(
+        Key key,
         Store & store,
-        const Attrs & inAttrs,
-        const Attrs & infoAttrs,
-        const StorePath & storePath,
-        bool locked) = 0;
+        Attrs value,
+        const StorePath & storePath) = 0;
 
-    virtual std::optional<std::pair<Attrs, StorePath>> lookup(
-        Store & store,
-        const Attrs & inAttrs) = 0;
-
-    struct Result
+    struct ResultWithStorePath : Result
     {
-        bool expired = false;
-        Attrs infoAttrs;
         StorePath storePath;
     };
 
-    virtual std::optional<Result> lookupExpired(
-        Store & store,
-        const Attrs & inAttrs) = 0;
+    /**
+     * Look up a store path in the cache. The returned store path will
+     * be valid, but it may be expired.
+     */
+    virtual std::optional<ResultWithStorePath> lookupStorePath(
+        Key key,
+        Store & store) = 0;
+
+    /**
+     * Look up a store path in the cache. Return nothing if its TTL
+     * has exceeded `settings.tarballTTL`.
+     */
+    virtual std::optional<ResultWithStorePath> lookupStorePathWithTTL(
+        Key key,
+        Store & store) = 0;
 };
 
 ref<Cache> getCache();
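The second `upsert` overload and `lookupStorePath`/`lookupStorePathWithTTL` above replace the old `add`/`lookup(Store &, ...)` pair. Here is a minimal sketch of that store-path flavour, modelled on the `fetchToStore` and `downloadFile` hunks further down; the wrapper function, the include paths and the choice of key attributes are assumptions for illustration only.

    #include "cache.hh"      // nix::fetchers::Cache, getCache() (include path assumed)
    #include "store-api.hh"  // nix::Store, nix::StorePath (include path assumed)

    #include <functional>
    #include <string>

    using namespace nix;
    using namespace nix::fetchers;

    // Hypothetical helper: produce a store path at most once per URL, reusing a
    // previously cached path while it is still valid and fresh.
    StorePath fetchOnce(Store & store, const std::string & url, std::function<StorePath()> fetch)
    {
        // Illustrative key; the real downloadFile() key also includes "name".
        Cache::Key key{"file", {{"url", url}}};

        // lookupStorePath*() mixes the store prefix into the key, adds a temp
        // root and checks that the cached path is still valid; the WithTTL
        // variant additionally drops entries older than settings.tarballTtl.
        if (auto res = getCache()->lookupStorePathWithTTL(key, store))
            return res->storePath;

        auto storePath = fetch();

        // This overload records the path inside the value attrs ("storePath")
        // and keys the entry on the store prefix as well.
        getCache()->upsert(key, store, {}, storePath);

        return storePath;
    }

One consequence of this design, visible in the Mercurial hunks below, is that mutable ref-to-rev mappings and rev-to-store-path entries now live in separate domains, so the TTL only applies to the ref lookup.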
@@ -16,20 +16,18 @@ StorePath fetchToStore(
     // FIXME: add an optimisation for the case where the accessor is
     // a `PosixSourceAccessor` pointing to a store path.
 
-    std::optional<fetchers::Attrs> cacheKey;
+    std::optional<fetchers::Cache::Key> cacheKey;
 
     if (!filter && path.accessor->fingerprint) {
-        cacheKey = fetchers::Attrs{
-            {"_what", "fetchToStore"},
-            {"store", store.storeDir},
+        cacheKey = fetchers::Cache::Key{"fetchToStore", {
             {"name", std::string{name}},
             {"fingerprint", *path.accessor->fingerprint},
             {"method", std::string{method.render()}},
             {"path", path.path.abs()}
-        };
-        if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) {
+        }};
+        if (auto res = fetchers::getCache()->lookupStorePath(*cacheKey, store)) {
             debug("store path cache hit for '%s'", path);
-            return res->second;
+            return res->storePath;
         }
     } else
         debug("source path '%s' is uncacheable", path);
@@ -47,10 +45,9 @@ StorePath fetchToStore(
             name, path, method, HashAlgorithm::SHA256, {}, filter2, repair);
 
     if (cacheKey && mode == FetchMode::Copy)
-        fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);
+        fetchers::getCache()->upsert(*cacheKey, store, {}, storePath);
 
     return storePath;
 }
 
 
 }
@@ -452,7 +452,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
     {
         auto accessor = getAccessor(treeHash, false);
 
-        fetchers::Attrs cacheKey({{"_what", "treeHashToNarHash"}, {"treeHash", treeHash.gitRev()}});
+        fetchers::Cache::Key cacheKey{"treeHashToNarHash", {{"treeHash", treeHash.gitRev()}}};
 
         if (auto res = fetchers::getCache()->lookup(cacheKey))
             return Hash::parseAny(fetchers::getStrAttr(*res, "narHash"), HashAlgorithm::SHA256);
@@ -225,8 +225,8 @@ struct GitArchiveInputScheme : InputScheme
 
         auto cache = getCache();
 
-        Attrs treeHashKey{{"_what", "gitRevToTreeHash"}, {"rev", rev->gitRev()}};
-        Attrs lastModifiedKey{{"_what", "gitRevToLastModified"}, {"rev", rev->gitRev()}};
+        Cache::Key treeHashKey{"gitRevToTreeHash", {{"rev", rev->gitRev()}}};
+        Cache::Key lastModifiedKey{"gitRevToLastModified", {{"rev", rev->gitRev()}}};
 
         if (auto treeHashAttrs = cache->lookup(treeHashKey)) {
             if (auto lastModifiedAttrs = cache->lookup(lastModifiedKey)) {
@@ -22,21 +22,20 @@ DownloadFileResult downloadFile(
 {
     // FIXME: check store
 
-    Attrs inAttrs({
-        {"type", "file"},
+    Cache::Key key{"file", {{
         {"url", url},
         {"name", name},
-    });
+    }}};
 
-    auto cached = getCache()->lookupExpired(*store, inAttrs);
+    auto cached = getCache()->lookupStorePath(key, *store);
 
     auto useCached = [&]() -> DownloadFileResult
     {
         return {
             .storePath = std::move(cached->storePath),
-            .etag = getStrAttr(cached->infoAttrs, "etag"),
-            .effectiveUrl = getStrAttr(cached->infoAttrs, "url"),
-            .immutableUrl = maybeGetStrAttr(cached->infoAttrs, "immutableUrl"),
+            .etag = getStrAttr(cached->value, "etag"),
+            .effectiveUrl = getStrAttr(cached->value, "url"),
+            .immutableUrl = maybeGetStrAttr(cached->value, "immutableUrl"),
         };
     };
 
@@ -46,7 +45,7 @@ DownloadFileResult downloadFile(
     FileTransferRequest request(url);
     request.headers = headers;
     if (cached)
-        request.expectedETag = getStrAttr(cached->infoAttrs, "etag");
+        request.expectedETag = getStrAttr(cached->value, "etag");
     FileTransferResult res;
     try {
         res = getFileTransfer()->download(request);
@@ -92,14 +91,9 @@ DownloadFileResult downloadFile(
 
     /* Cache metadata for all URLs in the redirect chain. */
     for (auto & url : res.urls) {
-        inAttrs.insert_or_assign("url", url);
+        key.second.insert_or_assign("url", url);
         infoAttrs.insert_or_assign("url", *res.urls.rbegin());
-        getCache()->add(
-            *store,
-            inAttrs,
-            infoAttrs,
-            *storePath,
-            false);
+        getCache()->upsert(key, *store, infoAttrs, *storePath);
     }
 
     return {
@@ -114,12 +108,9 @@ DownloadTarballResult downloadTarball(
     const std::string & url,
     const Headers & headers)
 {
-    Attrs inAttrs({
-        {"_what", "tarballCache"},
-        {"url", url},
-    });
+    Cache::Key cacheKey{"tarball", {{"url", url}}};
 
-    auto cached = getCache()->lookupExpired(inAttrs);
+    auto cached = getCache()->lookupExpired(cacheKey);
 
     auto attrsToResult = [&](const Attrs & infoAttrs)
     {
@@ -132,19 +123,19 @@ DownloadTarballResult downloadTarball(
         };
     };
 
-    if (cached && !getTarballCache()->hasObject(getRevAttr(cached->infoAttrs, "treeHash")))
+    if (cached && !getTarballCache()->hasObject(getRevAttr(cached->value, "treeHash")))
         cached.reset();
 
     if (cached && !cached->expired)
         /* We previously downloaded this tarball and it's younger than
            `tarballTtl`, so no need to check the server. */
-        return attrsToResult(cached->infoAttrs);
+        return attrsToResult(cached->value);
 
     auto _res = std::make_shared<Sync<FileTransferResult>>();
 
     auto source = sinkToSource([&](Sink & sink) {
         FileTransferRequest req(url);
-        req.expectedETag = cached ? getStrAttr(cached->infoAttrs, "etag") : "";
+        req.expectedETag = cached ? getStrAttr(cached->value, "etag") : "";
         getFileTransfer()->download(std::move(req), sink,
             [_res](FileTransferResult r)
             {
@@ -167,7 +158,7 @@ DownloadTarballResult downloadTarball(
     if (res->cached) {
         /* The server says that the previously downloaded version is
            still current. */
-        infoAttrs = cached->infoAttrs;
+        infoAttrs = cached->value;
     } else {
         infoAttrs.insert_or_assign("etag", res->etag);
         infoAttrs.insert_or_assign("treeHash", parseSink->sync().gitRev());
@@ -178,8 +169,8 @@ DownloadTarballResult downloadTarball(
 
     /* Insert a cache entry for every URL in the redirect chain. */
     for (auto & url : res->urls) {
-        inAttrs.insert_or_assign("url", url);
-        getCache()->upsert(inAttrs, infoAttrs);
+        cacheKey.second.insert_or_assign("url", url);
+        getCache()->upsert(cacheKey, infoAttrs);
     }
 
     // FIXME: add a cache entry for immutableUrl? That could allow
@@ -426,7 +426,7 @@ struct GitInputScheme : InputScheme
 
     uint64_t getLastModified(const RepoInfo & repoInfo, const std::string & repoDir, const Hash & rev) const
     {
-        Attrs key{{"_what", "gitLastModified"}, {"rev", rev.gitRev()}};
+        Cache::Key key{"gitLastModified", {{"rev", rev.gitRev()}}};
 
         auto cache = getCache();
 
@@ -435,14 +435,14 @@ struct GitInputScheme : InputScheme
 
         auto lastModified = GitRepo::openRepo(repoDir)->getLastModified(rev);
 
-        cache->upsert(key, Attrs{{"lastModified", lastModified}});
+        cache->upsert(key, {{"lastModified", lastModified}});
 
         return lastModified;
     }
 
     uint64_t getRevCount(const RepoInfo & repoInfo, const std::string & repoDir, const Hash & rev) const
     {
-        Attrs key{{"_what", "gitRevCount"}, {"rev", rev.gitRev()}};
+        Cache::Key key{"gitRevCount", {{"rev", rev.gitRev()}}};
 
         auto cache = getCache();
 
@@ -222,22 +222,16 @@ struct MercurialInputScheme : InputScheme
 
         if (!input.getRef()) input.attrs.insert_or_assign("ref", "default");
 
-        auto checkHashAlgorithm = [&](const std::optional<Hash> & hash)
+        auto revInfoKey = [&](const Hash & rev)
         {
-            if (hash.has_value() && hash->algo != HashAlgorithm::SHA1)
-                throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));
-        };
-
-        auto getLockedAttrs = [&]()
-        {
-            checkHashAlgorithm(input.getRev());
+            if (rev.algo != HashAlgorithm::SHA1)
+                throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", rev.to_string(HashFormat::Base16, true));
 
-            return Attrs({
-                {"type", "hg"},
+            return Cache::Key{"hgRev", {
+                {"store", store->storeDir},
                 {"name", name},
-                {"rev", input.getRev()->gitRev()},
-            });
+                {"rev", input.getRev()->gitRev()}
+            }};
         };
 
         auto makeResult = [&](const Attrs & infoAttrs, const StorePath & storePath) -> StorePath
@@ -248,26 +242,21 @@ struct MercurialInputScheme : InputScheme
             return storePath;
         };
 
-        if (input.getRev()) {
-            if (auto res = getCache()->lookup(*store, getLockedAttrs()))
-                return makeResult(res->first, std::move(res->second));
+        /* Check the cache for the most recent rev for this URL/ref. */
+        Cache::Key refToRevKey{"hgRefToRev", {
+            {"url", actualUrl},
+            {"ref", *input.getRef()}
+        }};
+
+        if (!input.getRev()) {
+            if (auto res = getCache()->lookupWithTTL(refToRevKey))
+                input.attrs.insert_or_assign("rev", getRevAttr(*res, "rev").gitRev());
         }
 
-        auto revOrRef = input.getRev() ? input.getRev()->gitRev() : *input.getRef();
-
-        Attrs unlockedAttrs({
-            {"type", "hg"},
-            {"name", name},
-            {"url", actualUrl},
-            {"ref", *input.getRef()},
-        });
-
-        if (auto res = getCache()->lookup(*store, unlockedAttrs)) {
-            auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1);
-            if (!input.getRev() || input.getRev() == rev2) {
-                input.attrs.insert_or_assign("rev", rev2.gitRev());
-                return makeResult(res->first, std::move(res->second));
-            }
+        /* If we have a rev, check if we have a cached store path. */
+        if (auto rev = input.getRev()) {
+            if (auto res = getCache()->lookupStorePath(revInfoKey(*rev), *store))
+                return makeResult(res->value, res->storePath);
         }
 
         Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false));
@@ -300,45 +289,42 @@ struct MercurialInputScheme : InputScheme
             }
         }
 
+        /* Fetch the remote rev or ref. */
         auto tokens = tokenizeString<std::vector<std::string>>(
-            runHg({ "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
+            runHg({
+                "log", "-R", cacheDir,
+                "-r", input.getRev() ? input.getRev()->gitRev() : *input.getRef(),
+                "--template", "{node} {rev} {branch}"
+            }));
         assert(tokens.size() == 3);
 
-        input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], HashAlgorithm::SHA1).gitRev());
+        auto rev = Hash::parseAny(tokens[0], HashAlgorithm::SHA1);
+        input.attrs.insert_or_assign("rev", rev.gitRev());
         auto revCount = std::stoull(tokens[1]);
         input.attrs.insert_or_assign("ref", tokens[2]);
 
-        if (auto res = getCache()->lookup(*store, getLockedAttrs()))
-            return makeResult(res->first, std::move(res->second));
+        /* Now that we have the rev, check the cache again for a
+           cached store path. */
+        if (auto res = getCache()->lookupStorePath(revInfoKey(rev), *store))
+            return makeResult(res->value, res->storePath);
 
         Path tmpDir = createTempDir();
         AutoDelete delTmpDir(tmpDir, true);
 
-        runHg({ "archive", "-R", cacheDir, "-r", input.getRev()->gitRev(), tmpDir });
+        runHg({ "archive", "-R", cacheDir, "-r", rev.gitRev(), tmpDir });
 
         deletePath(tmpDir + "/.hg_archival.txt");
 
         auto storePath = store->addToStore(name, {getFSSourceAccessor(), CanonPath(tmpDir)});
 
         Attrs infoAttrs({
-            {"rev", input.getRev()->gitRev()},
             {"revCount", (uint64_t) revCount},
         });
 
         if (!origRev)
-            getCache()->add(
-                *store,
-                unlockedAttrs,
-                infoAttrs,
-                storePath,
-                false);
+            getCache()->upsert(refToRevKey, {{"rev", rev.gitRev()}});
 
-        getCache()->add(
-            *store,
-            getLockedAttrs(),
-            infoAttrs,
-            storePath,
-            true);
+        getCache()->upsert(revInfoKey(rev), *store, infoAttrs, storePath);
 
         return makeResult(infoAttrs, std::move(storePath));
     }
@@ -101,6 +101,7 @@ path4=$(nix eval --impure --refresh --raw --expr "(builtins.fetchMercurial file:
 [[ $path2 = $path4 ]]
 
 echo paris > $repo/hello
 
+# Passing a `name` argument should be reflected in the output path
 path5=$(nix eval -vvvvv --impure --refresh --raw --expr "(builtins.fetchMercurial { url = \"file://$repo\"; name = \"foo\"; } ).outPath")
 [[ $path5 =~ -foo$ ]]