Merge pull request #10414 from edolstra/remove-downloadFile-locked

downloadFile(): Remove the "locked" (aka "immutable") flag
commit aa438b8fba
John Ericson, 2024-04-12 17:23:53 -04:00, committed by GitHub
6 changed files with 11 additions and 13 deletions
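
For code outside this repository that calls the fetchers API, the change amounts to dropping the former "locked" boolean at each call site; as the tarball.cc hunk below shows, the value is now always recorded as unlocked ("false") in the fetcher cache. A minimal sketch of an updated caller, assuming the usual Nix headers and using a hypothetical helper name that is not part of this PR:

    // Minimal sketch of a caller updated for the new downloadFile() signature.
    // Include paths are assumed; "fetchRegistryJson" is a hypothetical helper.
    #include "store-api.hh"
    #include "tarball.hh"

    using namespace nix;

    static StorePath fetchRegistryJson(ref<Store> store, const std::string & url)
    {
        // Old call: fetchers::downloadFile(store, url, "registry.json", /* locked */ false);
        // New call: the boolean is gone; the optional Headers argument is unchanged.
        return fetchers::downloadFile(store, url, "registry.json").storePath;
    }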

Changed file 1 of 6:

@@ -473,7 +473,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
     auto storePath =
         unpack
         ? fetchToStore(*state.store, fetchers::downloadTarball(*url).accessor, FetchMode::Copy, name)
-        : fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
+        : fetchers::downloadFile(state.store, *url, name).storePath;
     if (expectedHash) {
         auto hash = unpack

Changed file 2 of 6:

@@ -357,7 +357,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
         auto json = nlohmann::json::parse(
             readFile(
                 store->toRealPath(
-                    downloadFile(store, url, "source", false, headers).storePath)));
+                    downloadFile(store, url, "source", headers).storePath)));
         return RefInfo {
             .rev = Hash::parseAny(std::string { json["sha"] }, HashAlgorithm::SHA1),
@@ -431,7 +431,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
         auto json = nlohmann::json::parse(
             readFile(
                 store->toRealPath(
-                    downloadFile(store, url, "source", false, headers).storePath)));
+                    downloadFile(store, url, "source", headers).storePath)));
         return RefInfo {
             .rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1)
@@ -495,7 +495,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
         std::string refUri;
         if (ref == "HEAD") {
             auto file = store->toRealPath(
-                downloadFile(store, fmt("%s/HEAD", base_url), "source", false, headers).storePath);
+                downloadFile(store, fmt("%s/HEAD", base_url), "source", headers).storePath);
             std::ifstream is(file);
             std::string line;
             getline(is, line);
@@ -511,7 +511,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
         std::regex refRegex(refUri);
         auto file = store->toRealPath(
-            downloadFile(store, fmt("%s/info/refs", base_url), "source", false, headers).storePath);
+            downloadFile(store, fmt("%s/info/refs", base_url), "source", headers).storePath);
         std::ifstream is(file);
         std::string line;

Changed file 3 of 6:

@@ -158,7 +158,7 @@ static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
         }
         if (!hasPrefix(path, "/")) {
-            auto storePath = downloadFile(store, path, "flake-registry.json", false).storePath;
+            auto storePath = downloadFile(store, path, "flake-registry.json").storePath;
             if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
                 store2->addPermRoot(storePath, getCacheDir() + "/nix/flake-registry.json");
             path = store->toRealPath(storePath);

Changed file 4 of 6:

@@ -19,7 +19,6 @@ DownloadFileResult downloadFile(
     ref<Store> store,
     const std::string & url,
     const std::string & name,
-    bool locked,
     const Headers & headers)
 {
     // FIXME: check store
@@ -101,7 +100,7 @@ DownloadFileResult downloadFile(
             inAttrs,
             infoAttrs,
             *storePath,
-            locked);
+            false);
     }
     return {
@@ -306,7 +305,7 @@ struct FileInputScheme : CurlInputScheme
            the Nix store directly, since there is little deduplication
            benefit in using the Git cache for single big files like
            tarballs. */
-        auto file = downloadFile(store, getStrAttr(input.attrs, "url"), input.getName(), false);
+        auto file = downloadFile(store, getStrAttr(input.attrs, "url"), input.getName());
         auto narHash = store->queryPathInfo(file.storePath)->narHash;
         input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));

Changed file 5 of 6:

@@ -25,7 +25,6 @@ DownloadFileResult downloadFile(
     ref<Store> store,
     const std::string & url,
     const std::string & name,
-    bool locked,
     const Headers & headers = {});
 struct DownloadTarballResult

Changed file 6 of 6:

@@ -112,7 +112,7 @@ static void update(const StringSet & channelNames)
         // We want to download the url to a file to see if it's a tarball while also checking if we
         // got redirected in the process, so that we can grab the various parts of a nix channel
         // definition from a consistent location if the redirect changes mid-download.
-        auto result = fetchers::downloadFile(store, url, std::string(baseNameOf(url)), false);
+        auto result = fetchers::downloadFile(store, url, std::string(baseNameOf(url)));
         auto filename = store->toRealPath(result.storePath);
         url = result.effectiveUrl;
@@ -126,9 +126,9 @@ static void update(const StringSet & channelNames)
         if (!unpacked) {
             // Download the channel tarball.
             try {
-                filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", "nixexprs.tar.xz", false).storePath);
+                filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", "nixexprs.tar.xz").storePath);
             } catch (FileTransferError & e) {
-                filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2", false).storePath);
+                filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2").storePath);
             }
         }
         // Regardless of where it came from, add the expression representing this channel to accumulated expression