downloadFile(): Remove the "locked" (aka "immutable") flag

This flag was used in only one place, namely builtins.fetchurl with an
expected hash. Since it can cause issues similar to those described in
#9814 and #9905 for the "locked" flag of fetchTarball and fetchTree,
let's just remove it.
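
For illustration, the only affected call pattern is builtins.fetchurl
with an expected hash, roughly like this (URL and hash are placeholders,
not taken from this commit):

  builtins.fetchurl {
    url = "https://example.org/archive.tar.gz";                        # placeholder URL
    sha256 = "0000000000000000000000000000000000000000000000000000";   # placeholder (52 zeros)
  }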

Note that if an expected hash is given and the hash algorithm is
SHA-256, then we will never do a download anyway if the resulting
store path already exists. So removing the "locked" flag will only
cause potentially unnecessary HTTP requests (subject to the tarball
TTL) for non-SHA-256 hashes.
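
The "tarball TTL" mentioned above is the existing tarball-ttl setting;
as a purely illustrative sketch (the value below is an example, not part
of this change), such re-checks can be made less frequent via nix.conf:

  # nix.conf: consider cached downloads fresh for one day
  tarball-ttl = 86400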

Author: Eelco Dolstra
Date:   2024-04-05 16:35:12 +02:00
Parent: 334874d101
Commit: d29786f258

6 changed files with 11 additions and 13 deletions

@@ -473,7 +473,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
     auto storePath =
         unpack
         ? fetchToStore(*state.store, fetchers::downloadTarball(*url).accessor, FetchMode::Copy, name)
-        : fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
+        : fetchers::downloadFile(state.store, *url, name).storePath;

     if (expectedHash) {
         auto hash = unpack

@@ -357,7 +357,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
         auto json = nlohmann::json::parse(
             readFile(
                 store->toRealPath(
-                    downloadFile(store, url, "source", false, headers).storePath)));
+                    downloadFile(store, url, "source", headers).storePath)));

         return RefInfo {
             .rev = Hash::parseAny(std::string { json["sha"] }, HashAlgorithm::SHA1),
@@ -431,7 +431,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
         auto json = nlohmann::json::parse(
             readFile(
                 store->toRealPath(
-                    downloadFile(store, url, "source", false, headers).storePath)));
+                    downloadFile(store, url, "source", headers).storePath)));

         return RefInfo {
             .rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1)
@@ -495,7 +495,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
         std::string refUri;
         if (ref == "HEAD") {
             auto file = store->toRealPath(
-                downloadFile(store, fmt("%s/HEAD", base_url), "source", false, headers).storePath);
+                downloadFile(store, fmt("%s/HEAD", base_url), "source", headers).storePath);
             std::ifstream is(file);
             std::string line;
             getline(is, line);
@@ -511,7 +511,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
         std::regex refRegex(refUri);

         auto file = store->toRealPath(
-            downloadFile(store, fmt("%s/info/refs", base_url), "source", false, headers).storePath);
+            downloadFile(store, fmt("%s/info/refs", base_url), "source", headers).storePath);
         std::ifstream is(file);

         std::string line;

@@ -158,7 +158,7 @@ static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
        }

        if (!hasPrefix(path, "/")) {
-            auto storePath = downloadFile(store, path, "flake-registry.json", false).storePath;
+            auto storePath = downloadFile(store, path, "flake-registry.json").storePath;
            if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
                store2->addPermRoot(storePath, getCacheDir() + "/nix/flake-registry.json");
            path = store->toRealPath(storePath);

@@ -19,7 +19,6 @@ DownloadFileResult downloadFile(
     ref<Store> store,
     const std::string & url,
     const std::string & name,
-    bool locked,
     const Headers & headers)
 {
     // FIXME: check store
@@ -101,7 +100,7 @@ DownloadFileResult downloadFile(
             inAttrs,
             infoAttrs,
             *storePath,
-            locked);
+            false);
     }

     return {
@@ -306,7 +305,7 @@ struct FileInputScheme : CurlInputScheme
            the Nix store directly, since there is little deduplication
            benefit in using the Git cache for single big files like
            tarballs. */
-        auto file = downloadFile(store, getStrAttr(input.attrs, "url"), input.getName(), false);
+        auto file = downloadFile(store, getStrAttr(input.attrs, "url"), input.getName());

        auto narHash = store->queryPathInfo(file.storePath)->narHash;
        input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));

@@ -25,7 +25,6 @@ DownloadFileResult downloadFile(
     ref<Store> store,
     const std::string & url,
     const std::string & name,
-    bool locked,
     const Headers & headers = {});

 struct DownloadTarballResult

@@ -112,7 +112,7 @@ static void update(const StringSet & channelNames)
        // We want to download the url to a file to see if it's a tarball while also checking if we
        // got redirected in the process, so that we can grab the various parts of a nix channel
        // definition from a consistent location if the redirect changes mid-download.
-        auto result = fetchers::downloadFile(store, url, std::string(baseNameOf(url)), false);
+        auto result = fetchers::downloadFile(store, url, std::string(baseNameOf(url)));
        auto filename = store->toRealPath(result.storePath);
        url = result.effectiveUrl;
@@ -126,9 +126,9 @@ static void update(const StringSet & channelNames)
        if (!unpacked) {
            // Download the channel tarball.
            try {
-                filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", "nixexprs.tar.xz", false).storePath);
+                filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", "nixexprs.tar.xz").storePath);
            } catch (FileTransferError & e) {
-                filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2", false).storePath);
+                filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2").storePath);
            }
        }
        // Regardless of where it came from, add the expression representing this channel to accumulated expression